diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e68b575fddb2572afdf27d6c7942de8ea85fc2eb --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml @@ -0,0 +1,507 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_erase_board_passing_left_to_right +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: education + level2: school + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: whiteboard + level1: stationery + level2: whiteboard + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- use the left gripper to place the eraser in the right, use the right gripper to + pick up the eraser, wipe the notes on the whiteboard clean, and then put it down. +sub_tasks: +- subtask: Right gripper + subtask_index: 0 +- subtask: Move the board eraser to the center of the whiteboard with left gripper + subtask_index: 1 +- subtask: Wipe off the writing on the board with right gripper + subtask_index: 2 +- subtask: Place the board eraser on the right side of board with right gripper + subtask_index: 3 +- subtask: Move the board eraser to the right of the whiteboard with right gripper + subtask_index: 4 +- subtask: Pick up the board eraser with right gripper + subtask_index: 5 +- subtask: Move the board eraser to the right of the whiteboard with left gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 +atomic_actions: +- grasp +- lift +- wipe +- handover +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 38393 + fps: 30 + total_tasks: 10 + total_videos: 150 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 335.24 MB +frame_num: 38393 +dataset_size: 335.24 MB +data_structure: 'Agilex_Cobot_Magic_erase_board_passing_left_to_right_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... 
(38 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + 
names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..b635ffd6ffb8e2080a8ca874bf214c2fdf8b3d5e --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml @@ -0,0 +1,511 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_short_sleeve_black +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: bedroom + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: black_T-shirt + level1: clothing + level2: black_T-shirt + level3: null + level4: null + level5: null +- object_name: green_tray + level1: kitchen_supplies + level2: green_tray + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use two grippers to fold the black short sleeve, and use the left claw to place + the folded black short sleeve on the tray. 
+sub_tasks: +- subtask: Lift the black T-shirt with the left gripper + subtask_index: 0 +- subtask: Lift the black T-shirt with the right gripper + subtask_index: 1 +- subtask: Grasp the black T-shirt with the left gripper + subtask_index: 2 +- subtask: Fold the black T-shirt downward with the right gripper + subtask_index: 3 +- subtask: Grasp the black T-shirt with the right gripper + subtask_index: 4 +- subtask: Fold the black T-shirt downward with the left gripper + subtask_index: 5 +- subtask: Fold the black T-shirt from right to left with right gripper + subtask_index: 6 +- subtask: abnormal + subtask_index: 7 +- subtask: end + subtask_index: 8 +- subtask: Fold the black T-shirt from left to right with left gripper + subtask_index: 9 +- subtask: Place the folded black T-shirt on the green tray with the left gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 +atomic_actions: +- grasp +- lift +- lower +- fold +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 76223 + fps: 30 + total_tasks: 12 + total_videos: 150 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 999.69 MB +frame_num: 76223 +dataset_size: 999.69 MB +data_structure: 'Agilex_Cobot_Magic_fold_short_sleeve_black_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... 
(38 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + 
names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..e151892e329001a283393ca0994a457b0e1a5ecf --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml @@ -0,0 +1,1224 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_move_object_green_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial & convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: green_table_cloths + level1: laboratory_supplies + level2: green_table_cloths + level3: null + level4: null + level5: null +- object_name: waffle + level1: food + level2: waffle + level3: null + level4: null + level5: null +- object_name: green_lemon + level1: food + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: chocolate + level1: food + level2: chocolate + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mint_candy + level1: food + level2: mint_candy + level3: null + level4: null + level5: null +- object_name: mangosteen + level1: food + level2: mangosteen + level3: null + level4: null + level5: null +- object_name: orange + level1: food + level2: orange + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: Fruit cake + level5: null +- object_name: cake + level1: food + level2: cake + level3: null + level4: null + level5: null +- object_name: beef_cheeseburger + level1: food + level2: beef_cheeseburger + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: pan + level1: kitchen_supplies + level2: pan + level3: null + level4: null + level5: null +- object_name: small_teapot + level1: 
kitchen_supplies + level2: small_teapot + level3: null + level4: null + level5: null +- object_name: small_teacup + level1: kitchen_supplies + level2: small_teacup + level3: null + level4: null + level5: null +- object_name: paper_ball + level1: trash + level2: paper_ball + level3: null + level4: null + level5: null +- object_name: brown_square_towel + level1: daily_necessities + level2: brown_square_towel + level3: null + level4: null + level5: null +- object_name: black_cylindrical_pen_holder + level1: stationery + level2: black_cylindrical_pen_holder + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_necessities + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: duck + level1: toys + level2: duck + level3: null + level4: null + level5: null +- object_name: compass + level1: stationery + level2: compass + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: blue_long_towel + level1: daily_necessities + level2: blue_long_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the gripper move the object. 
+sub_tasks: +- subtask: Place the XX on the table with the left gripper + subtask_index: 0 +- subtask: 'Grasp the blue blackboard earser with the right gripper + + ' + subtask_index: 1 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 2 +- subtask: 'Grasp the pen container with the right gripper + + ' + subtask_index: 3 +- subtask: 'Grasp the blue bowl with the left gripper + + ' + subtask_index: 4 +- subtask: 'Place the hard blackbaord cleanser on the table with the left gripper + + ' + subtask_index: 5 +- subtask: 'Place the blue towel on the table with the right gripper + + ' + subtask_index: 6 +- subtask: 'Grasp the orange with the right gripper + + ' + subtask_index: 7 +- subtask: 'Grasp the white blackboard earser with the left gripper + + ' + subtask_index: 8 +- subtask: 'Place the mangosteen on the table with the left gripper + + ' + subtask_index: 9 +- subtask: Grasp the pen container with the right gripper + subtask_index: 10 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 11 +- subtask: Grasp the white blackboard earser with left gripper + subtask_index: 12 +- subtask: Place the blue bowl on the table with the right gripper + subtask_index: 13 +- subtask: Place the eggplant on the table with the right gripper + subtask_index: 14 +- subtask: 'Place the blue blackboard earser on the table with the right gripper + + ' + subtask_index: 15 +- subtask: 'Place the blue blackboard earser on the table with the right gripper + + ' + subtask_index: 16 +- subtask: 'Place the orange on the table with the right gripper + + ' + subtask_index: 17 +- subtask: 'Place the blue bowl on the table with the right gripper + + ' + subtask_index: 18 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 19 +- subtask: 'Grasp the cyan cup with the right gripper ' + subtask_index: 20 +- subtask: Place the compasses on the table with the right gripper + subtask_index: 21 
+- subtask: Grasp the compasses with the right gripper + subtask_index: 22 +- subtask: 'Grasp the green lemon with the right gripper + + ' + subtask_index: 23 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 24 +- subtask: 'Place the wallfe on the table with the left gripper + + ' + subtask_index: 25 +- subtask: 'Place the blue bowl on the table with the left gripper + + ' + subtask_index: 26 +- subtask: Grasp the fruit candy with the left gripper + subtask_index: 27 +- subtask: Grasp the orange with the right gripper + subtask_index: 28 +- subtask: Place the green lemon on the table with the right gripper + subtask_index: 29 +- subtask: 'Grasp the cyan cup with the right gripper + + ' + subtask_index: 30 +- subtask: 'Grasp the wallfe with the right gripper + + ' + subtask_index: 31 +- subtask: 'Grasp the green lemon with the left gripper + + ' + subtask_index: 32 +- subtask: 'Place the white blackboard earser on the table with the right gripper + + ' + subtask_index: 33 +- subtask: ' + + Grasp the green lemon with the right gripper' + subtask_index: 34 +- subtask: 'Grasp the square chewing gun with the left gripper + + ' + subtask_index: 35 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 36 +- subtask: 'Grasp the with cyan cup the left gripper + + ' + subtask_index: 37 +- subtask: Place the blue bowl on the table with the left gripper + subtask_index: 38 +- subtask: 'Grasp the tea[ot with the left gripper + + ' + subtask_index: 39 +- subtask: 'Grasp the cyan cup with the right gripper ' + subtask_index: 40 +- subtask: 'Place the wallfe on the table with the right gripper + + ' + subtask_index: 41 +- subtask: 'Place the pen container on the table with the right gripper + + ' + subtask_index: 42 +- subtask: Place the eggplant on the table with the left gripper + subtask_index: 43 +- subtask: 'Grasp the hard facial cleanser with the right gripper + + ' + subtask_index: 44 +- subtask: 'Place the ornage on the 
table with the right gripper + + ' + subtask_index: 45 +- subtask: 'Grasp the pen container with the left gripper + + ' + subtask_index: 46 +- subtask: Place the fruit candy on the table with the left gripper + subtask_index: 47 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 48 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 49 +- subtask: 'Grasp the blue blackboard earser with the right gripper ' + subtask_index: 50 +- subtask: Grasp the mangosteen with the left gripper + subtask_index: 51 +- subtask: 'Place the square chewing gun on the table with the right gripper + + ' + subtask_index: 52 +- subtask: 'Grasp the brown towel with the left gripper + + ' + subtask_index: 53 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 54 +- subtask: 'Grasp the brown towel with the right gripper mangosteen + + ' + subtask_index: 55 +- subtask: 'Grasp the blue bowl with the right gripper + + ' + subtask_index: 56 +- subtask: Place the teapot on the table with the left gripper + subtask_index: 57 +- subtask: 'Grasp the blue cup with the left gripper + + ' + subtask_index: 58 +- subtask: Grasp the teacup with the right gripper + subtask_index: 59 +- subtask: Grasp the square chewing gun with the right gripper + subtask_index: 60 +- subtask: Grasp the teapot with the right gripper + subtask_index: 61 +- subtask: Place the pink towel on the table with the left gripper + subtask_index: 62 +- subtask: Grasp the blue bowl with the left gripper + subtask_index: 63 +- subtask: Place the waffle on the table with the right gripper + subtask_index: 64 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 65 +- subtask: Place the mangosteen on the table with the right gripper + subtask_index: 66 +- subtask: 'Grasp the pen container with the left gripper + + ' + subtask_index: 67 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 68 +- 
subtask: 'Grasp the white blackboard earser with the right gripper ' + subtask_index: 69 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 70 +- subtask: 'Place the eggplant on the table with the left gripper + + ' + subtask_index: 71 +- subtask: Grasp the eggplant with the left gripper + subtask_index: 72 +- subtask: Place the hard facial cleanser on the table with the left gripper + subtask_index: 73 +- subtask: 'Grasp the hard facial cleanser with the right gripper ' + subtask_index: 74 +- subtask: 'Grasp the mangosteen with the left gripper + + ' + subtask_index: 75 +- subtask: 'Place the mangosteen on the table with the right gripper + + ' + subtask_index: 76 +- subtask: End + subtask_index: 77 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 78 +- subtask: Grasp the white blackboard erasure with the right gripper + subtask_index: 79 +- subtask: Grasp the blue blackboard erasure with the left gripper + subtask_index: 80 +- subtask: 'Grasp the white blackboard earser with the right gripper + + ' + subtask_index: 81 +- subtask: 'Place the orange on the table with the left gripper + + ' + subtask_index: 82 +- subtask: Place the fruit candy on the table with the right gripper + subtask_index: 83 +- subtask: 'Grasp the brown towel with the right gripper + + ' + subtask_index: 84 +- subtask: 'Grasp the orange with the left gripper + + ' + subtask_index: 85 +- subtask: 'Place the compass on the table with the right gripper + + ' + subtask_index: 86 +- subtask: 'Grasp the tea cup with the left gripper + + ' + subtask_index: 87 +- subtask: 'Place the hard facial cleanser on the table with the left gripper + + ' + subtask_index: 88 +- subtask: 'Place the brown towel on the table with the right gripper + + ' + subtask_index: 89 +- subtask: 'Grasp the eggplant with the left gripper + + ' + subtask_index: 90 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 91 +- subtask: 'Grasp the white 
blackboard earser with the right gripper + + ' + subtask_index: 92 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 93 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 94 +- subtask: Grasp the waffle with the right gripper + subtask_index: 95 +- subtask: Grasp the blue bowl with the right gripper + subtask_index: 96 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 97 +- subtask: 'Place the waffle on the table with the right gripper + + ' + subtask_index: 98 +- subtask: 'Grasp the blue bowl with the right gripper ' + subtask_index: 99 +- subtask: Abnormal + subtask_index: 100 +- subtask: 'Grasp the white blackboard earser with the right gripper mangosteen + + ' + subtask_index: 101 +- subtask: "Grasp the blue towel with the right gripper \ + \ \n" + subtask_index: 102 +- subtask: 'Grasp the brown towel with the right gripper + + ' + subtask_index: 103 +- subtask: 'Place the pen container on the table with the right gripper + + ' + subtask_index: 104 +- subtask: Place the square chewing gum on the table with the right gripper + subtask_index: 105 +- subtask: 'Place the green lemon on the table with the right gripper + + ' + subtask_index: 106 +- subtask: 'Place the chocolate on the table with the right gripper + + ' + subtask_index: 107 +- subtask: 'Grasp the square facial square with the right gripper ' + subtask_index: 108 +- subtask: 'Grasp the teapot with the left gripper + + ' + subtask_index: 109 +- subtask: 'Place the pen container on the table with the left gripper + + ' + subtask_index: 110 +- subtask: 'Place the hard facial earser on the table with the right gripper + + ' + subtask_index: 111 +- subtask: Place the teacup on the table with the left gripper + subtask_index: 112 +- subtask: 'Place the sqaure chewing gun on the table with the right gripper + + ' + subtask_index: 113 +- subtask: 'Place the blue cup on the table with the left gripper + + 
' + subtask_index: 114 +- subtask: Place the blue blackboard erasure on the table with the left gripper + subtask_index: 115 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 116 +- subtask: Grasp the cyan cup with the right gripper + subtask_index: 117 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 118 +- subtask: Grasp the mangosteen with the right gripper + subtask_index: 119 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 120 +- subtask: 'Place the wallfe on the table with the right gripper + + ' + subtask_index: 121 +- subtask: 'Grasp the square chewing gun with the left gripper + + ' + subtask_index: 122 +- subtask: 'Grasp the blue bowel with the right gripper ' + subtask_index: 123 +- subtask: Grasp the orange with the left gripper + subtask_index: 124 +- subtask: 'Place the sqaure chewing gun on the table with the right gripper + + ' + subtask_index: 125 +- subtask: 'Grasp the sqaure chewing gun with the right gripper + + ' + subtask_index: 126 +- subtask: Place the orange on the table with the right gripper + subtask_index: 127 +- subtask: 'Place the white blackboard earser on the table with the left gripper + + ' + subtask_index: 128 +- subtask: 'Place the teacup on the table with the left gripper + + ' + subtask_index: 129 +- subtask: Grasp the pen container with the right gripper + subtask_index: 130 +- subtask: 'Grasp the blue blackboard earser with the left gripper + + ' + subtask_index: 131 +- subtask: Place the teapot on the table with the right gripper + subtask_index: 132 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 133 +- subtask: Place the mangosteen on the table with the left gripper + subtask_index: 134 +- subtask: 'Place the square chewing gun on the table with the left gripper + + ' + subtask_index: 135 +- subtask: Grasp the fruit candy with the right gripper + subtask_index: 136 +- subtask: Grasp the square chewing gum with 
the right gripper + subtask_index: 137 +- subtask: 'Grasp the compass with the right gripper + + ' + subtask_index: 138 +- subtask: 'Place the teapot on the table with the left gripper + + ' + subtask_index: 139 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 140 +- subtask: Place the teacup on the table with the right gripper + subtask_index: 141 +- subtask: 'Grasp the eggplant with the right gripper + + ' + subtask_index: 142 +- subtask: 'Grasp the pen container with the right gripper mangosteen + + ' + subtask_index: 143 +- subtask: 'Grasp the mangosteen with the right gripper ' + subtask_index: 144 +- subtask: 'Place the green lemon on the table with the left gripper + + ' + subtask_index: 145 +- subtask: Place the pen containeron the table with the left gripper + subtask_index: 146 +- subtask: Place the white blackboard erasure on the table with the right gripper + subtask_index: 147 +- subtask: 'Grasp the hard facial cleanser with the left gripper + + ' + subtask_index: 148 +- subtask: 'Grasp the whiite blackboard earser with the right gripper + + ' + subtask_index: 149 +- subtask: 'Grasp the wallfe with the left gripper + + ' + subtask_index: 150 +- subtask: 'Grasp the wallfe with the right gripper mangosteen + + ' + subtask_index: 151 +- subtask: 'Grasp the chocolate with the right gripper ' + subtask_index: 152 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 153 +- subtask: 'Grasp the cyan cup with the right gripper + + ' + subtask_index: 154 +- subtask: 'Place the green lemon on the table with the right gripper + + ' + subtask_index: 155 +- subtask: Place the chocolate on the table with the left gripper + subtask_index: 156 +- subtask: 'Grasp the cyan cup with the right gripper mangosteen + + ' + subtask_index: 157 +- subtask: 'Grasp the eggplant with the right gripper ' + subtask_index: 158 +- subtask: 'Grasp the white blackboard earser with the left gripper + + ' + subtask_index: 159 
+- subtask: 'Place the hard facial cleanser on the table with the right gripper + + ' + subtask_index: 160 +- subtask: 'Place the blue blackboard earser on the table with the left gripper + + ' + subtask_index: 161 +- subtask: 'Place the eggplant on the table with the right gripper + + ' + subtask_index: 162 +- subtask: 'Grasp the chocolate with the right gripper + + ' + subtask_index: 163 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 164 +- subtask: Place the orange on the table with the left gripper + subtask_index: 165 +- subtask: Grasp the XX with the left gripper + subtask_index: 166 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 167 +- subtask: Place the chocolate on the table with the right gripper + subtask_index: 168 +- subtask: Place the white blackboard earser on the table with the left gripper + subtask_index: 169 +- subtask: 'Grasp the chocolate with the left gripper + + ' + subtask_index: 170 +- subtask: Grasp the pen container with the left gripper + subtask_index: 171 +- subtask: 'Place the sqaure chewing gun on the table with the left gripper + + ' + subtask_index: 172 +- subtask: 'Place the tea cup on the table with the left gripper + + ' + subtask_index: 173 +- subtask: 'Place the chocolate on the table with the right gripper + + ' + subtask_index: 174 +- subtask: 'Grasp the teacup with the left gripper + + ' + subtask_index: 175 +- subtask: Place the square chewing gum on the table with the left gripper + subtask_index: 176 +- subtask: 'Grasp the brown towel with the right gripper ' + subtask_index: 177 +- subtask: Grasp the teapot with the left gripper + subtask_index: 178 +- subtask: Grasp the teacup with the left gripper + subtask_index: 179 +- subtask: 'Place the chocolate on the table with the left gripper + + ' + subtask_index: 180 +- subtask: 'Place the cyan cup on the table with the right gripper + + ' + subtask_index: 181 +- subtask: 'null' + subtask_index: 182 
+atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 197 + total_frames: 85405 + fps: 30 + total_tasks: 183 + total_videos: 591 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 5.33 GB +frame_num: 85405 +dataset_size: 5.33 GB +data_structure: 'Agilex_Cobot_Magic_move_object_green_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | 
|-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (185 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:196 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - 
right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + 
eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..530c160186fa5199c69a85cae26275908d40f0bd --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml @@ -0,0 +1,913 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_move_object_red_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial & convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: red_table_cloths + level1: laboratory_supplies + level2: red_table_cloths + level3: null + level4: null + level5: null +- object_name: waffle + level1: food + level2: waffle + level3: null + level4: null + level5: null +- object_name: green_lemon + level1: food + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: chocolate + level1: food + level2: chocolate + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mint_candy + level1: food + level2: mint_candy + level3: null + level4: null + level5: null +- object_name: mangosteen + level1: food + level2: mangosteen + level3: null + level4: null + level5: null +- object_name: orange + level1: food + level2: orange + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: Fruit cake + level5: null +- object_name: cake + level1: food + level2: cake + level3: null + level4: null + level5: null +- object_name: beef_cheeseburger + level1: food + level2: beef_cheeseburger + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: pan + level1: kitchen_supplies + level2: pan + level3: null + level4: null + level5: null +- object_name: small_teapot + level1: kitchen_supplies + 
level2: small_teapot + level3: null + level4: null + level5: null +- object_name: small_teacup + level1: kitchen_supplies + level2: small_teacup + level3: null + level4: null + level5: null +- object_name: paper_ball + level1: trash + level2: paper_ball + level3: null + level4: null + level5: null +- object_name: brown_square_towel + level1: daily_necessities + level2: brown_square_towel + level3: null + level4: null + level5: null +- object_name: black_cylindrical_pen_holder + level1: stationery + level2: black_cylindrical_pen_holder + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_necessities + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: duck + level1: toys + level2: duck + level3: null + level4: null + level5: null +- object_name: compass + level1: stationery + level2: compass + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: blue_long_towel + level1: daily_necessities + level2: blue_long_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the gripper move the object. 
+sub_tasks: +- subtask: Grasp the pink towel with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the mint candy on the table with the left gripper + subtask_index: 2 +- subtask: Place the snickers on the table with the right gripper + subtask_index: 3 +- subtask: Grasp the pen container with the right gripper + subtask_index: 4 +- subtask: Grasp the grey towel with the left gripper + subtask_index: 5 +- subtask: Place the eyeglass case on the table with the left gripper + subtask_index: 6 +- subtask: Grasp the white duck with the left gripper + subtask_index: 7 +- subtask: Place the eggplant on the table with the right gripper + subtask_index: 8 +- subtask: Place the blue bowl on the table with the right gripper + subtask_index: 9 +- subtask: Grasp the banana with the left gripper + subtask_index: 10 +- subtask: Place the compasses on the table with the right gripper + subtask_index: 11 +- subtask: Place the sandwich on the table with the left gripper + subtask_index: 12 +- subtask: Place the pink cake on the table with the right gripper + subtask_index: 13 +- subtask: Place the banana on the table with the right gripper + subtask_index: 14 +- subtask: Grasp the compasses with the right gripper + subtask_index: 15 +- subtask: Grasp the orange with the right gripper + subtask_index: 16 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 17 +- subtask: Place the peach on the table with the right gripper + subtask_index: 18 +- subtask: Place the green lemon on the table with the right gripper + subtask_index: 19 +- subtask: Grasp the mint candy with the left gripper + subtask_index: 20 +- subtask: Grasp the sandwich with the right gripper + subtask_index: 21 +- subtask: Grasp the eyeglass case with the left gripper + subtask_index: 22 +- subtask: Place the compasses on the table with the left gripper + subtask_index: 23 +- subtask: Place the eyeglass case on the 
table with the right gripper + subtask_index: 24 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 25 +- subtask: Place the brown towel on the table with the left gripper + subtask_index: 26 +- subtask: Place the blue bowl on the table with the left gripper + subtask_index: 27 +- subtask: Grasp the sandwich biscuit with the right gripper + subtask_index: 28 +- subtask: Place the white blackboard erasure on the table with the left gripper + subtask_index: 29 +- subtask: Grasp the white blackboard erasure with the left gripper + subtask_index: 30 +- subtask: Grasp the snickers with the right gripper + subtask_index: 31 +- subtask: Grasp the eyeglass case with the right gripper + subtask_index: 32 +- subtask: Place the eggplant on the table with the left gripper + subtask_index: 33 +- subtask: Place the mango on the table with the right gripper + subtask_index: 34 +- subtask: Place the hard facial cleanser on the table with the right gripper + subtask_index: 35 +- subtask: Place the mint candy on the table with the right gripper + subtask_index: 36 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 37 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 38 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 39 +- subtask: Grasp the mangosteen with the left gripper + subtask_index: 40 +- subtask: Grasp the peach with the right gripper + subtask_index: 41 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 42 +- subtask: Grasp the mango with the left gripper + subtask_index: 43 +- subtask: Place the snickers on the table with the left gripper + subtask_index: 44 +- subtask: Grasp the snickers with the left gripper + subtask_index: 45 +- subtask: Grasp the lemon with the left gripper + subtask_index: 46 +- subtask: Place the teapot on the table with the left gripper + subtask_index: 47 +- subtask: Grasp the hard facial cleanser with the right gripper 
+ subtask_index: 48 +- subtask: Grasp the sandwich with the left gripper + subtask_index: 49 +- subtask: Place the white duck on the table with the left gripper + subtask_index: 50 +- subtask: Place the white duck on the table with the right gripper + subtask_index: 51 +- subtask: Grasp the teapot with the right gripper + subtask_index: 52 +- subtask: Place the pink towel on the table with the left gripper + subtask_index: 53 +- subtask: Grasp the hollow ring bread with the right gripper + subtask_index: 54 +- subtask: 'Grasp the hollow ring bread with the right gripper + + ' + subtask_index: 55 +- subtask: Grasp the blue bowl with the left gripper + subtask_index: 56 +- subtask: Place the waffle on the table with the right gripper + subtask_index: 57 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 58 +- subtask: Place the mangosteen on the table with the right gripper + subtask_index: 59 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 60 +- subtask: Grasp the pink cake with the left gripper + subtask_index: 61 +- subtask: Grasp the gray towel with the right gripper + subtask_index: 62 +- subtask: Place the pink towel on the table with the right gripper + subtask_index: 63 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 64 +- subtask: Place the green lemon on the table with the left gripper + subtask_index: 65 +- subtask: Grasp the eggplant with the left gripper + subtask_index: 66 +- subtask: Place the hard facial cleanser on the table with the left gripper + subtask_index: 67 +- subtask: Place the lemon on the table with the left gripper + subtask_index: 68 +- subtask: End + subtask_index: 69 +- subtask: Grasp the white duck with the right gripper + subtask_index: 70 +- subtask: Grasp the white blackboard erasure with the right gripper + subtask_index: 71 +- subtask: Grasp the blue blackboard erasure with the left gripper + subtask_index: 72 +- subtask: Place the sandwich 
biscuit on the table with the right gripper + subtask_index: 73 +- subtask: Place the sandwich on the table with the right gripper + subtask_index: 74 +- subtask: Place the fruit candy on the table with the right gripper + subtask_index: 75 +- subtask: Place the gray towel on the table with the right gripper + subtask_index: 76 +- subtask: Place the blue cup on the table with the right gripper + subtask_index: 77 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 78 +- subtask: Grasp the compasses with the left gripper + subtask_index: 79 +- subtask: Grasp the waffle with the right gripper + subtask_index: 80 +- subtask: Grasp the blue bowl with the right gripper + subtask_index: 81 +- subtask: Place the pink cake on the table with the left gripper + subtask_index: 82 +- subtask: Abnormal + subtask_index: 83 +- subtask: Place the square chewing gum on the table with the right gripper + subtask_index: 84 +- subtask: Place the blue blackboard erasure on the table with the left gripper + subtask_index: 85 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 86 +- subtask: Grasp the mangosteen with the right gripper + subtask_index: 87 +- subtask: Place the grey towel on the table with the left gripper + subtask_index: 88 +- subtask: Grasp the orange with the left gripper + subtask_index: 89 +- subtask: Place the hollow ring bread on the table with the right gripper + subtask_index: 90 +- subtask: Place the blue cup on the table with the left gripper + subtask_index: 91 +- subtask: Place the orange on the table with the right gripper + subtask_index: 92 +- subtask: Place the teapot on the table with the right gripper + subtask_index: 93 +- subtask: Place the mangosteen on the table with the left gripper + subtask_index: 94 +- subtask: Grasp the fruit candy with the right gripper + subtask_index: 95 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 96 +- subtask: Place the white 
blackboard erasure on the table with the right gripper + subtask_index: 97 +- subtask: Grasp the pink cake with the right gripper + subtask_index: 98 +- subtask: Place the mango on the table with the left gripper + subtask_index: 99 +- subtask: Place the chocolate on the table with the left gripper + subtask_index: 100 +- subtask: 'Place the hollow ring bread on the table with the right gripper + + ' + subtask_index: 101 +- subtask: Grasp the blue blackboard erasure with the right gripper + subtask_index: 102 +- subtask: Place the banana on the table with the left gripper + subtask_index: 103 +- subtask: 'Place the hard facial cleanser on the table with the right gripper + + ' + subtask_index: 104 +- subtask: Place the blue blackboard erasure on the table with the right gripper + subtask_index: 105 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 106 +- subtask: Grasp the mango with the right gripper + subtask_index: 107 +- subtask: Place the orange on the table with the left gripper + subtask_index: 108 +- subtask: Grasp the XX with the left gripper + subtask_index: 109 +- subtask: Place the chocolate on the table with the right gripper + subtask_index: 110 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 111 +- subtask: Grasp the mint candy with the right gripper + subtask_index: 112 +- subtask: Place the brown towel on the table with the right gripper + subtask_index: 113 +- subtask: Grasp the pen container with the left gripper + subtask_index: 114 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 115 +- subtask: Grasp the tea cup with the left gripper + subtask_index: 116 +- subtask: Place the tea cup on the table with the left gripper + subtask_index: 117 +- subtask: Place the square chewing gum on the table with the left gripper + subtask_index: 118 +- subtask: Grasp the teapot with the left gripper + subtask_index: 119 +- subtask: Grasp the banana with the right gripper + subtask_index: 120 
+- subtask: 'null' + subtask_index: 121 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 198 + total_frames: 100817 + fps: 30 + total_tasks: 122 + total_videos: 594 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 3.23 GB +frame_num: 100817 +dataset_size: 3.23 GB +data_structure: 'Agilex_Cobot_Magic_move_object_red_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet 
+ + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (186 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:197 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + 
- right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: 
+ - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..05c45e62e8cc4e0c3938e24a34bea651514f6a26 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml @@ -0,0 +1,857 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_object_red_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: red_table_cloths + level1: laboratory_supplies + level2: white_table_cloths + level3: null + level4: null + level5: null +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: apple + level1: food + level2: apple + level3: null + level4: null + level5: null +- object_name: yellow_lemon + level1: food + level2: yellow_lemon + level3: null + level4: null + level5: null +- object_name: pomegranate + level1: food + level2: pomegranate + level3: null + level4: null + level5: null +- object_name: bread_dough + level1: food + level2: bread_dough + level3: null + level4: null + level5: null +- object_name: waffle + level1: food + level2: waffle + level3: null + level4: null + level5: null +- object_name: green_lemon + level1: food + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: chocolate + level1: food + level2: chocolate + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mint_candy + level1: food + level2: mint_candy + level3: null + level4: null + level5: null +- object_name: mangosteen + level1: food + level2: mangosteen + level3: null + level4: null + level5: null +- object_name: orange + level1: food + level2: orange + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: Fruit cake + level5: null +- object_name: cake + level1: food + level2: cake + 
level3: null + level4: null + level5: null +- object_name: beef_cheeseburger + level1: food + level2: beef_cheeseburger + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: pan + level1: kitchen_supplies + level2: pan + level3: null + level4: null + level5: null +- object_name: small_teapot + level1: kitchen_supplies + level2: small_teapot + level3: null + level4: null + level5: null +- object_name: small_teacup + level1: kitchen_supplies + level2: small_teacup + level3: null + level4: null + level5: null +- object_name: paper_ball + level1: trash + level2: paper_ball + level3: null + level4: null + level5: null +- object_name: brown_square_towel + level1: daily_necessities + level2: brown_square_towel + level3: null + level4: null + level5: null +- object_name: black_cylindrical_pen_holder + level1: stationery + level2: black_cylindrical_pen_holder + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_necessities + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: duck + level1: toys + level2: duck + level3: null + level4: null + level5: null +- object_name: compass + level1: stationery + level2: compass + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: blue_long_towel + level1: daily_necessities + level2: blue_long_towel + level3: null + level4: null + level5: null +- object_name: pear + level1: food + level2: pear + level3: null + level4: null + level5: null +- 
object_name: mint_candy + level1: food + level2: mint_candy + level3: null + level4: null + level5: null +- object_name: triangular_bread + level1: food + level2: triangular_bread + level3: null + level4: null + level5: null +- object_name: long_bread + level1: food + level2: long_bread + level3: null + level4: null + level5: null +- object_name: chinese_cabbage + level1: food + level2: chinese_cabbage + level3: null + level4: null + level5: null +- object_name: peach + level1: food + level2: peach + level3: null + level4: null + level5: null +- object_name: can + level1: food + level2: can + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_necessities + level2: bathing_in_flowers + level3: null + level4: null + level5: null +- object_name: wok + level1: kitchen_supplies + level2: wok + level3: null + level4: null + level5: null +- object_name: red_bull_canned_drink + level1: beverages + level2: red_bull_canned_drink + level3: null + level4: null + level5: null +- object_name: eyeglass_case + level1: laboratory_supplies + level2: eyeglass_case + level3: null + level4: null + level5: null +- object_name: coke (Slim Can) + level1: beverages + level2: coke (Slim Can) + level3: null + level4: null + level5: null +- object_name: wahaha_AD_calcium + level1: beverages + level2: wahaha_AD_calcium + level3: null + level4: null + level5: null +- object_name: brave_the_world_beer + level1: beverages + level2: brave_the_world_beer + level3: null + level4: null + level5: null +- object_name: brave_the_world_beer + level1: beverages + level2: brave_the_world_beer + level3: null + level4: null + level5: null +- object_name: shampoo + level1: daily_necessities + level2: shampoo + level3: null + level4: null + level5: null +- object_name: cleanser + level1: daily_necessities + level2: cleanser + level3: null + level4: null + level5: null +- object_name: sausage + level1: food + level2: sausage + level3: null + level4: null + level5: null +- 
object_name: french_fries + level1: food + level2: french_fries + level3: null + level4: null + level5: null +- object_name: purple_trash_bag + level1: trash + level2: purple_trash_bag + level3: null + level4: null + level5: null +- object_name: red_date + level1: food + level2: red_date + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up an item with a gripper and place it in a random container on the desktop. +sub_tasks: +- subtask: Place the XX into the blue bowl with the left gripper + subtask_index: 0 +- subtask: Place the XX into the purple pot with the right gripper + subtask_index: 1 +- subtask: Place the XX into the pink pot with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the right gripper + subtask_index: 3 +- subtask: Place the XX into the purple pot with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the XX into the pink pot with the left gripper + subtask_index: 6 +- subtask: Place the XX into the cyan plate with the right gripper + subtask_index: 7 +- subtask: Place the XX into the cyan plate with the left gripper + subtask_index: 8 +- subtask: Place the XX into the red pot with the left gripper + subtask_index: 9 +- subtask: Place the XX into the pen container with the right gripper + subtask_index: 10 +- subtask: Grasp the XX with the left gripper + subtask_index: 11 +- subtask: Place the XX into the blue bowl with the right gripper + subtask_index: 12 +- subtask: Place the XX into the pink bowl with the left gripper + subtask_index: 13 +- subtask: Place the XX into the red pot with the right gripper + subtask_index: 14 +- subtask: Place the XX into the pink bowl with the right gripper + subtask_index: 15 +- subtask: Place the XX into the white plate with the left gripper + subtask_index: 16 +- subtask: Place the XX into the white plate with the 
right gripper + subtask_index: 17 +- subtask: Place the XX into the blue plate with the left gripper + subtask_index: 18 +- subtask: Place the XX into the pen container with the left gripper + subtask_index: 19 +- subtask: Place the XX into the blue plate with the right gripper + subtask_index: 20 +- subtask: 'null' + subtask_index: 21 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 200 + total_frames: 99000 + fps: 30 + total_tasks: 22 + total_videos: 600 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 2.88 GB +frame_num: 99000 +dataset_size: 2.88 GB +data_structure: 'Agilex_Cobot_Magic_storage_object_red_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + 
+ | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (188 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:199 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + 
- left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: 
+ - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4
diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..690f6e3174c97b3b68e97bc5e8adf579cfadf296
--- /dev/null
+++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml
@@ -0,0 +1,497 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Agilex_Cobot_Magic_storage_peach_brown_bag
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: living_room
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: table
+  level1: home_storage
+  level2: table
+  level3: null
+  level4: null
+  level5: null
+- object_name: brown_basket
+  level1: home_storage
+  level2: brown_basket
+  level3: null
+  level4: null
+  level5: null
+- object_name: brwon_canvas_bags
+  level1: daily_necessities
+  level2: brwon_canvas_bags
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information. 
+task_instruction: +- open the brown linen bag and put the peach in the bag. +sub_tasks: +- subtask: Put down the handbag with left grippe + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the peach in the handbag with right gripper + subtask_index: 2 +- subtask: Lift the handbag with left gripper + subtask_index: 3 +- subtask: Grasp the peach with right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 49252 + fps: 30 + total_tasks: 6 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 568.64 MB +frame_num: 49252 +dataset_size: 568.64 MB +data_structure: 'Agilex_Cobot_Magic_storage_peach_brown_bag_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- 
gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (88 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - 
left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + 
eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_close_door_left.yaml b/dataset_info/Airbot_MMK2_close_door_left.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..55f83b6fb225b5f031becaeb3313c1058100174f
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_close_door_left.yaml
@@ -0,0 +1,421 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_close_door_left
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: bedroom
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: cabinet
+  level1: home_storage
+  level2: cabinet
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- close the cabinet door with your left hand. 
+sub_tasks: +- subtask: Touch the door with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Close the cupboard door with the left gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 +atomic_actions: +- push +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 5322 + fps: 30 + total_tasks: 4 + total_videos: 200 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 161.77 MB +frame_num: 5322 +dataset_size: 161.77 MB +data_structure: "Airbot_MMK2_close_door_left_qced_hardlink/\n|-- annotations\n| \ + \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| \ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- 
episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + 
shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: 
float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_cut_scallion.yaml b/dataset_info/Airbot_MMK2_cut_scallion.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e97e23be44ee8d6c55c6bc3c9a36c9d846b821c9
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_cut_scallion.yaml
@@ -0,0 +1,433 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_cut_scallion
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: kitchen
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: kitchen_knife
+  level1: kitchen_supplies
+  level2: kitchen_knife
+  level3: null
+  level4: null
+  level5: null
+- object_name: garlic
+  level1: vegetables
+  level2: garlic
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- pick up the kitchen knife with your hand and cut the vegetables. 
+sub_tasks: +- subtask: Grasp the kitchen knife with the right gripper + subtask_index: 0 +- subtask: Place the kitchen knife back on the knife holder with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Cut scallions with the right gripper + subtask_index: 3 +- subtask: Press the scallion with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +- cut +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 97 + total_frames: 33460 + fps: 30 + total_tasks: 6 + total_videos: 388 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 1.17 GB +frame_num: 33460 +dataset_size: 1.17 GB +data_structure: "Airbot_MMK2_cut_scallion_qced_hardlink/\n|-- annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:96 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 
640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - 
right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot 
and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_doodled_line.yaml b/dataset_info/Airbot_MMK2_doodled_line.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9a03592584486ed9c94136eb08b70c5463a4f942 --- /dev/null +++ b/dataset_info/Airbot_MMK2_doodled_line.yaml @@ -0,0 +1,438 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_doodled_line +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: other + level2: laboratory + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: ballpoint_pen + level1: stationery + level2: ballpoint_pen + level3: null + level4: null + level5: null +- object_name: square_building_blocks + level1: toys + level2: square_building_blocks + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- Pick up the ballpoint pen and leave your handwriting on the paper. 
+sub_tasks: +- subtask: Close the pen switch with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Lift the pen with the right gripper + subtask_index: 2 +- subtask: Grasp the pen with the right gripper + subtask_index: 3 +- subtask: Place the pen on the table with the right gripper + subtask_index: 4 +- subtask: Write on paper with a pen with right gripper + subtask_index: 5 +- subtask: Open the pen switch with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- pick +- place +- pressbutton +- write +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 99 + total_frames: 57693 + fps: 30 + total_tasks: 8 + total_videos: 396 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 2.09 GB +frame_num: 57693 +dataset_size: 2.09 GB +data_structure: "Airbot_MMK2_doodled_line_qced_hardlink/\n|-- 
annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:98 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + 
observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - 
right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + 
contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: 
+ + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_move_block_twice.yaml b/dataset_info/Airbot_MMK2_move_block_twice.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e104ea2d4457c0a315170d1cd8f1390ffcecb6ba --- /dev/null +++ b/dataset_info/Airbot_MMK2_move_block_twice.yaml @@ -0,0 +1,434 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_move_block_twice +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: rubik's_cube + level1: toys + level2: rubik's_cube + level3: null + level4: null + level5: null +- object_name: square_building_blocks + level1: toys + level2: square_building_blocks + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- place the building blocks on the Rubik's Cube with your left hand and take them + down with your right hand. +sub_tasks: +- subtask: Grasp the yellow build block with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the yellow build block on the Rubik's Cube with the left gripper + subtask_index: 2 +- subtask: Place the yellow build block on the table with the right gripper + subtask_index: 3 +- subtask: Grasp the yellow build block with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 57 + total_frames: 14643 + fps: 30 + total_tasks: 6 + total_videos: 228 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 666.57 MB +frame_num: 14643 +dataset_size: 666.57 MB +data_structure: 
"Airbot_MMK2_move_block_twice_qced_hardlink/\n|-- annotations\n| \ + \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| \ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (45 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:56 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + 
video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - 
left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: 
int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please 
also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_move_paper_box.yaml b/dataset_info/Airbot_MMK2_move_paper_box.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7ae258fc7ba0996c1b3c3f69ed9eeb9be9d55179 --- /dev/null +++ b/dataset_info/Airbot_MMK2_move_paper_box.yaml @@ -0,0 +1,496 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_move_paper_box +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: study_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: paper_box + level1: home_storage + level2: paper_box + level3: null + level4: null + level5: null +- object_name: white_lid + level1: laboratory_supplies + level2: white_lid + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- pick up the cardboard box with both hands and place it on the lid. +sub_tasks: +- subtask: End + subtask_index: 0 +- subtask: Place the phone case box on the white lid with the right gripper + subtask_index: 1 +- subtask: Grasp the mouse box with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: Grasp the phone case box with the right gripper + subtask_index: 5 +- subtask: Place the mouse box on the white lid with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 47 + total_frames: 5070 + fps: 30 + total_tasks: 8 + total_videos: 188 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 190.01 MB +frame_num: 5070 +dataset_size: 190.01 MB +data_structure: 
'Airbot_MMK2_move_paper_box_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (35 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_front_rgb + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:46 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + 
video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - 
left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + 
eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = 
{https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_organize_plate.yaml b/dataset_info/Airbot_MMK2_organize_plate.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e67757c3e43ec9a60e9dea0d47da720f74970037 --- /dev/null +++ b/dataset_info/Airbot_MMK2_organize_plate.yaml @@ -0,0 +1,436 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_organize_plate +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: round_plate + level1: kitchen_supplies + level2: plates + level3: null + level4: null + level5: null +- object_name: storage_rack + level1: home_storage + level2: storage_racks + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use both hands to place the stacked plates on the table onto the shelf. +sub_tasks: +- subtask: Press the plate with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the plate into the front mezzanine of the shelf with the right gripper + subtask_index: 2 +- subtask: Press the plate and push it to the right with the left gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the plate into the middle mezzanine of the shelf with the right gripper + subtask_index: 5 +- subtask: Grasp the plate with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 595 + total_frames: 338107 + fps: 30 + total_tasks: 8 + total_videos: 2380 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 14.21 GB +frame_num: 338107 +dataset_size: 14.21 GB +data_structure: "Airbot_MMK2_organize_plate_qced_hardlink/\n|-- annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... 
(583 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:594 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - 
left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: 
int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_stack_cup.yaml b/dataset_info/Airbot_MMK2_stack_cup.yaml new file mode 100644 index 0000000000000000000000000000000000000000..07e8ba65b1977479519a404163fc61b0c133a27a --- 
/dev/null +++ b/dataset_info/Airbot_MMK2_stack_cup.yaml @@ -0,0 +1,426 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_stack_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: cup + level1: kitchen_supplies + level2: cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the cup by hand and stack it on top of another cup. +sub_tasks: +- subtask: Place the purple cup on the pink cup with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the white cup with the right gripper + subtask_index: 2 +- subtask: Place the white cup on the purple cup with the right gripper + subtask_index: 3 +- subtask: Grasp the purple cup with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 99 + total_frames: 67637 + fps: 30 + total_tasks: 6 + total_videos: 396 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 2.37 GB +frame_num: 67637 +dataset_size: 2.37 GB +data_structure: "Airbot_MMK2_stack_cup_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ + | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ + | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ + | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n|\ + \ `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:98 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - 
left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: 
int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_storage_book.yaml b/dataset_info/Airbot_MMK2_storage_book.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..c2afa0748a3cdcd197d0ad33ef055c76417b2e52 --- /dev/null +++ b/dataset_info/Airbot_MMK2_storage_book.yaml @@ -0,0 +1,438 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_storage_book +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: study_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: bookshelves + level1: bookshelves + level2: bookshelves + level3: null + level4: null + level5: null +- object_name: book + level1: stationery + level2: book + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the book with right hand and place it on the bookshelf. 
+sub_tasks: +- subtask: End + subtask_index: 0 +- subtask: Grasp the book with the right gripper + subtask_index: 1 +- subtask: Place the book on the bookshelf with the left gripper + subtask_index: 2 +- subtask: Place the book on the bookshelf with the right gripper + subtask_index: 3 +- subtask: Push the book to the edge of the table with the right gripper + subtask_index: 4 +- subtask: Grasp the book with the left gripper + subtask_index: 5 +- subtask: Grasp the third book from the right with the left gripper + subtask_index: 6 +- subtask: Abnormal + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 237 + total_frames: 91429 + fps: 30 + total_tasks: 9 + total_videos: 948 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 4.07 GB +frame_num: 91429 +dataset_size: 4.07 GB 
+data_structure: "Airbot_MMK2_storage_book_qced_hardlink/\n|-- annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (225 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:236 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + 
video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - 
left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: 
int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please 
also consider citing:
+
+  LeRobot Framework: https://github.com/huggingface/lerobot
+
+  '
+version_info: Initial Release
+data_path: data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml b/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8b998c18a70413d4cb56c260e96f5c32c69389d2
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml
@@ -0,0 +1,437 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_storage_egg_white_box
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: kitchen
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: egg
+  level1: food
+  level2: eggs
+  level3: null
+  level4: null
+  level5: null
+- object_name: white_box
+  level1: storage_utensils
+  level2: storage_box
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction: +- use hands to pick the egg on the table and place them into the storage box. +sub_tasks: +- subtask: Place the egg into the left compartment of the storage box with the left + gripper + subtask_index: 0 +- subtask: Grasp the egg with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the egg with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the egg into the right compartment of the storage box with the right + gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 43 + total_frames: 6645 + fps: 30 + total_tasks: 7 + total_videos: 172 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 173.88 MB +frame_num: 6645 +dataset_size: 173.88 MB +data_structure: 
"Airbot_MMK2_storage_egg_white_box_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (31 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:42 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + 
video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - 
left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: 
int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please 
also consider citing:
+
+  LeRobot Framework: https://github.com/huggingface/lerobot
+
+  '
+version_info: Initial Release
+data_path: data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml b/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5eec4a0fd794cfde5b056eaf48e237d26d294315
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml
@@ -0,0 +1,450 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_storage_milk_tissue
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: kitchen
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: milk + level1: beverages + level2: milk + level3: null + level4: null + level5: null +- object_name: tissue_paper + level1: paper_towels + level2: tissue_paper + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the tissue with left hand and put it in the basket, then pick up the milk + with right hand and put it in the basket. +sub_tasks: +- subtask: Place the milk on the white basket with the right gripper + subtask_index: 0 +- subtask: Place the tissue on the white basket with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Grasp the milk with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the tissue with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 48 + total_frames: 10048 + fps: 30 + total_tasks: 8 + total_videos: 192 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 350.34 MB +frame_num: 10048 +dataset_size: 350.34 MB +data_structure: "Airbot_MMK2_storage_milk_tissue_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:47 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - 
left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: 
int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0
+citation_bibtex: "@article{robocoin,\n  title={RoboCOIN: An Open-Sourced Bimanual\
+  \ Robotic Data Collection for Integrated Manipulation},\n  author={Shihan Wu, Xuecheng\
+  \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\
+  \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\
+  \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\
+  \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\
+  \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\
+  \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\
+  \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\
+  \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\
+  \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\
+  \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\
+  \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\
+  \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\
+  \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n  journal={arXiv preprint arXiv:2511.17441},\n\
+  \ url = {https://arxiv.org/abs/2511.17441},\n  year={2025},\n  }\n"
+additional_citations: 'If you use this dataset, please also consider citing:
+
+  LeRobot Framework: https://github.com/huggingface/lerobot
+
+  '
+version_info: Initial Release
+data_path: data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml b/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml
new file mode 100644
index
0000000000000000000000000000000000000000..f08b15a5ca8543fb5bf07785072e9552ac190233 --- /dev/null +++ b/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml @@ -0,0 +1,443 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_storage_onion_sweet_potato +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: Kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: potato + level1: vegetables + level2: potato + level3: null + level4: null + level5: null +- object_name: onion + level1: vegetables + level2: onion + level3: null + level4: null + level5: null +- object_name: storage_box + level1: storage_utensils + level2: storage_box + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the potato with left hand and put it in the storage box, and pick up the + onion with right hand and put it in the storage box. 
+sub_tasks: +- subtask: Grasp the eggplant with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the sweet potato into the left compartment of the storage box with + the left gripper + subtask_index: 2 +- subtask: Place the eggplant into the right compartment of the storage box with the + right gripper + subtask_index: 3 +- subtask: Grasp the sweet potato with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- pinch +- pick +- place +- grasp +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 6502 + fps: 30 + total_tasks: 6 + total_videos: 200 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 173.29 MB +frame_num: 6502 +dataset_size: 173.29 MB +data_structure: "Airbot_MMK2_storage_onion_sweet_potato_qced_hardlink/\n|-- annotations\n\ + | |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: 
video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - 
right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of 
Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot 
+ + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml b/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml new file mode 100644 index 0000000000000000000000000000000000000000..89fc4e57ad99049dc46ce4ed164d4a8bc6198c14 --- /dev/null +++ b/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml @@ -0,0 +1,429 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_unscrew_bottle_cap +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: beverages + level1: beverages + level2: beverages + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the bottle with left hand and unscrew the cap with right hand. 
+sub_tasks: +- subtask: Grasp the bottle with the left gripper + subtask_index: 0 +- subtask: Place the bottle on the table with the left gripper + subtask_index: 1 +- subtask: Lift the bottle up with the left gripper + subtask_index: 2 +- subtask: Unscrew the bottle cap with the right hand while holding the bottle with + the left hand + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +- turn +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 16965 + fps: 30 + total_tasks: 6 + total_videos: 200 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 632.68 MB +frame_num: 16965 +dataset_size: 632.68 MB +data_structure: "Airbot_MMK2_unscrew_bottle_cap_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + 
info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - 
right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an 
extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml b/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e219793bad3343dca2950c7349b84423fca6c3dc --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml @@ -0,0 +1,555 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_pour_powder_marble_bar_counter +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: marble_bar_counter + level1: furniture + level2: marble_bar_counter + level3: null + level4: null + level5: null +- object_name: plastic_cup + level1: cups + level2: plastic_cup + level3: null + level4: null + level5: null +- object_name: green_dish + level1: plates + level2: green_dish + level3: null + level4: null + level5: null +- object_name: pink_bowl + level1: plastic_bowls + level2: pink_bowl + level3: null + level4: null + level5: null +- object_name: powder + level1: materials + level2: powder + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a gripper to pick up the cup and pour the powder into a bowl or tray. +sub_tasks: +- subtask: Pour the coffee powder into the pink bowl with right gripper + subtask_index: 0 +- subtask: Pour the powder into the pink bowl with the right gripper + subtask_index: 1 +- subtask: Pick up blue cup filled with milk powder with left gripper + subtask_index: 2 +- subtask: Pour the milk powder into the pink bowl with left gripper + subtask_index: 3 +- subtask: Place the glass cup down with the right gripper + subtask_index: 4 +- subtask: Place blue cup with milk powder on the table with left gripper + subtask_index: 5 +- subtask: Pour the powder into the pink bowl with the left gripper + subtask_index: 6 +- subtask: Pour the coffee powder into the pink bowl with left gripper + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: Pick up blue cup filled with coffee powder with right gripper + subtask_index: 9 +- subtask: Pick up blue cup filled with milk powder with right gripper + subtask_index: 10 +- subtask: Pick up blue cup filled with coffee powder with left gripper + subtask_index: 11 +- subtask: Place blue cup with coffee powder on the table with right gripper + subtask_index: 12 +- subtask: Pour the powder into the green bowl with the left 
gripper + subtask_index: 13 +- subtask: Pour the coffee powder into the blue basin with right gripper + subtask_index: 14 +- subtask: Pour the coffee powder into the blue basin with left gripper + subtask_index: 15 +- subtask: Grasp the glass of powder with the right gripper + subtask_index: 16 +- subtask: Place the glass cup down with the left gripper + subtask_index: 17 +- subtask: Pour the milk powder into the pink bowl with right gripper + subtask_index: 18 +- subtask: Pour the powder into the green bowl with the right gripper + subtask_index: 19 +- subtask: Pour the milk powder into the blue basin with left gripper + subtask_index: 20 +- subtask: Place blue cup with milk powder on the table with right gripper + subtask_index: 21 +- subtask: Place blue cup with coffee powder on the table with left gripper + subtask_index: 22 +- subtask: Pour the milk powder into the blue basin with right gripper + subtask_index: 23 +- subtask: End + subtask_index: 24 +- subtask: Grasp the glass of powder with the left gripper + subtask_index: 25 +- subtask: Right gripper + subtask_index: 26 +- subtask: 'null' + subtask_index: 27 +atomic_actions: +- grasp +- pick +- place +- pour +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 39829 + fps: 30 + total_tasks: 28 + total_videos: 400 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 1.58 GB +frame_num: 39829 +dataset_size: 1.58 GB +data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_powder_marble_bar_counter_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- 
episode_000011.parquet + + | `-- ... (88 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:99 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - 
right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + 
names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..c5a1816bbc6cc3519011caa7af32555a274768da --- /dev/null +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml @@ -0,0 +1,590 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: agilex_cobot_magic_pass_object_left_to_right_white_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial_convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: ambrosial_yogurt + level1: food + level2: ambrosial_yogurt + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: null + level5: null +- object_name: long_bread + level1: food + level2: long_bread + level3: null + level4: null + level5: null +- object_name: milk + level1: food + level2: milk + level3: null + level4: null + level5: null +- object_name: yogurt + level1: food + level2: yogurt + level3: null + level4: null + level5: null +- object_name: grape + level1: food + level2: grape + level3: null + level4: null + level5: null +- object_name: ham_sausage + level1: food + level2: ham_sausage + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: eyeglass_case + level1: laboratory_supplies + level2: eyeglass_case + level3: null + level4: null + level5: null +- object_name: rubik's_cube + level1: toys + level2: rubik's_cube + level3: null + level4: null + level5: null +- object_name: purple_trash_bag + level1: trash + level2: purple_trash_bag + level3: null + level4: null + level5: null +- object_name: cleanser + level1: daily_necessities + level2: cleanser + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_necessities + level2: bathing_in_flowers + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: candle + level1: daily_necessities + level2: candle + level3: null + level4: null + level5: null +- object_name: white_table_cloths + level1: laboratory_supplies + level2: 
white_table_cloths + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. +sub_tasks: +- subtask: Unlabeled + subtask_index: 0 +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 1 +- subtask: Pass the Rubik's Cube to the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the Rubik's Cube on the table with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- lift +- lower +- handover +- takeover +robot_name: +- agilex_cobot_magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 96 + total_frames: 55704 + fps: 30 + total_tasks: 6 + total_videos: 288 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 546.67 MB +frame_num: 55704 +dataset_size: 546.67 MB +data_structure: 
'Agilex_Cobot_Magic_pass_object_left_to_right_white_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (84 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:95 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + 
video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - 
left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial 
Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' 
+version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/info/consolidated_datasets.json index 3d8c3d86608f8174cdac992e49c3b89d698e7e76..610d199c0c62777990ab828cd2fef9956e849be8 100644 --- a/info/consolidated_datasets.json +++ b/info/consolidated_datasets.json @@ -526,6 +526,1004 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_storage_object_red_tablecloth": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_object_red_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "red_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + "level1": "food", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_lemon", + "level1": "food", + "level2": "yellow_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pomegranate", + "level1": "food", + "level2": "pomegranate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread_dough", + "level1": "food", + "level2": "bread_dough", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": "Fruit cake", + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beef_cheeseburger", + "level1": "food", + "level2": "beef_cheeseburger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pan", + "level1": "kitchen_supplies", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teapot", + "level1": "kitchen_supplies", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "small_teacup", + "level1": "kitchen_supplies", + "level2": "small_teacup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_ball", + "level1": "trash", + "level2": "paper_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_square_towel", + "level1": "daily_necessities", + "level2": "brown_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_cylindrical_pen_holder", + "level1": "stationery", + "level2": "black_cylindrical_pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_long_towel", + "level1": "daily_necessities", + "level2": "pink_long_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_long_towel", + "level1": "daily_necessities", + "level2": "blue_long_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pearMint candy", + "level1": "food", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "triangular_bread", + "level1": "food", + "level2": "triangular_bread Long Bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chinese_cabbage", + "level1": "food", + "level2": "chinese_cabbage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "food", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "food", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "wok", + "level1": "kitchen_supplies", + "level2": "wok", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_bull_canned_drink", + "level1": "beverages", + "level2": "wok", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke (Slim Can)", + "level1": "beverages", + "level2": "coke (Slim Can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "wahaha_AD_calcium", + "level1": "beverages", + "level2": "wahaha_AD_calcium", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brave_the_world_beer", + "level1": "beverages", + "level2": "brave_the_world_beer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brave_the_world_beer", + "level1": "beverages", + "level2": "brave_the_world_beer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "shampoo", + "level1": 
"daily_necessities", + "level2": "shampoo", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sausage", + "level1": "food", + "level2": "sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "french_fries", + "level1": "food", + "level2": "french_fries", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_date", + "level1": "food", + "level2": "red_date", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up an item with a gripper and place it in a random container on the desktop." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the XX into the blue bowl with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the XX into the purple pot with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the XX into the pink pot with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the XX with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the XX into the purple pot with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Place the XX into the pink pot with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the XX into the cyan plate with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the XX into the cyan plate with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the XX into the red pot with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the XX into the pen container with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the XX into the blue bowl with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the XX into the pink bowl with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the XX into the red pot with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the XX into the pink bowl with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the XX into the white plate with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the XX into the white plate with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the XX into the blue plate with the left gripper", + "subtask_index": 18 + }, + { + "subtask": "Place the XX into the pen container with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Place the XX into 
the blue plate with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "null", + "subtask_index": 21 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 200, + "total_frames": 99000, + "fps": 30, + "total_tasks": 22, + "total_videos": 600, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "2.88 GB" + }, + "frame_num": 99000, + "dataset_size": "2.88 GB", + "data_structure": "Agilex_Cobot_Magic_storage_object_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- 
episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (188 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:199" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ 
+ "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": 
"int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + 
"left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi 
Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "AgiBot-g1_right_capture_part": { "path": "AgiBot-g1_right_capture_part", "dataset_name": "right_capture_part", @@ -11516,6 +12514,505 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, + "Airbot_MMK2_storage_egg_white_box": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_egg_white_box", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "egg", + "level1": "food", + "level2": "eggs", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_box", + "level1": "storage_utensils", + "level2": "storage_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use hands to pick the egg on the table and place them into the storage box." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the egg into the left compartment of the storage box with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the egg with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "Grasp the egg with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "Place the egg into the right compartment of the storage box with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 43, + "total_frames": 6645, + "fps": 30, + "total_tasks": 7, + "total_videos": 172, + "total_chunks": 1, 
+ "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "173.88 MB" + }, + "frame_num": 6645, + "dataset_size": "173.88 MB", + "data_structure": "Airbot_MMK2_storage_egg_white_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:42" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + 
}, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "Galbot_g1_steamer_storage_baozi_b": { "path": "Galbot_g1_steamer_storage_baozi_b", "dataset_name": "steamer_storage_baozi_b", @@ -11734,6 +13231,524 @@ "data_schema": "leju_robot_pass_the_cleaner_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "leju_robot_pass_the_cleaner_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Agilex_Cobot_Magic_storage_peach_brown_bag": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + 
], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_peach_brown_bag", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brwon_canvas_bags", + "level1": "daily_necessities", + "level2": "brwon_canvas_bags", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "open the brown linen bag and put the peach in the bag." 
+ ], + "sub_tasks": [ + { + "subtask": "Put down the handbag with left grippe", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Place the peach in the handbag with right gripper", + "subtask_index": 2 + }, + { + "subtask": "Lift the handbag with left gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the peach with right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 100, + "total_frames": 49252, + "fps": 30, + "total_tasks": 6, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "568.64 MB" + }, + "frame_num": 49252, + "dataset_size": "568.64 MB", + "data_structure": "Agilex_Cobot_Magic_storage_peach_brown_bag_qced_hardlink/\n|-- 
annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:99" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + 
"video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + 
"task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + 
"shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, 
Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "AIRBOT_MMK2_storage_for_building_blocks_and_beauty_sponges": { "path": "AIRBOT_MMK2_storage_for_building_blocks_and_beauty_sponges", "dataset_name": "storage_for_building_blocks_and_beauty_sponges", @@ -21478,515 +23493,7 @@ "data_schema": "AIRBOT_MMK2_diamond_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_diamond_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Split_aloha_plate_storage": { - "path": 
"Split_aloha_plate_storage", - "dataset_name": "plate_storage", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pass the plate from the left gripper to the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate_rack", - "level1": "furniture", - "level2": "plate_rack", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-229687", - "dataset_size": "2.6GB", - "statistics": { - "total_episodes": 499, - "total_frames": 229687, - "total_tasks": 5, - "total_videos": 1497, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "7f2dd151-758c-4265-89b3-5d486f875ef8", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pass the plate from the left gripper to the right gripper", - "Place the plate on the shelf", - "Pass the plate from the right gripper to the left gripper", - "Pick up the plate with the left gripper", - "Pick up the plate with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": 
"https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Split_aloha_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── 
(...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_box_storage_cardboard_box_a": { - "path": "AgiBot-g1_box_storage_cardboard_box_a", - "dataset_name": 
"box_storage_cardboard_box_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Place the mouse and the power cord paper box into the container.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_box", - "level1": "tool", - "level2": "paper_box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "carton", - "level1": "tool", - "level2": "carton", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-188369", - "dataset_size": "87.9GB", - "statistics": { - "total_episodes": 402, - "total_frames": 188369, - "total_tasks": 1, - "total_videos": 3216, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "15be5a22-893f-4c6d-8562-afa106f6846f", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the mouse and the power cord paper box into the container.", - "Pick up the mouse and the power cord paper box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": 
"apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_cardboard_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_cardboard_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_switch_on_and_off_the_central_air_conditioning": { - "path": "R1_Lite_switch_on_and_off_the_central_air_conditioning", - "dataset_name": "switch_on_and_off_the_central_air_conditioning", - "robot_type": "", - 
"end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Turn off the central air conditioner", - "objects": [ - { - "object_name": "central_air_conditioning", - "level1": "household_appliances", - "level2": "central_air_conditioning", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-19688", - "dataset_size": "793.3MB", - "statistics": { - "total_episodes": 35, - "total_frames": 19688, - "total_tasks": 1, - "total_videos": 105, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "6d6d5868-e715-41cc-a7df-972218e02479", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Turn off the central air conditioner", - "Press the temperature decrease button", - "Press the temperature increase button", - "Turn on the central air conditioner", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, 
Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_switch_on_and_off_the_central_air_conditioning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_switch_on_and_off_the_central_air_conditioning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_plate_storage_rabbit_doll": { - "path": "G1edu-u3_plate_storage_rabbit_doll", - "dataset_name": "plate_storage_rabbit_doll", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the rabbit doll into the plate with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - 
"level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rabbit_doll", - "level1": "toy", - "level2": "rabbit_doll", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-76754", - "dataset_size": "378.4MB", - "statistics": { - "total_episodes": 227, - "total_frames": 76754, - "total_tasks": 1, - "total_videos": 227, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "adde8f5b-b32a-49e8-b684-9f89a37ec8e4", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the rabbit doll into the plate with the left gripper", - "Grasp the rabbit doll with the left gripper", - "End", - "Static", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue 
Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_plate_storage_rabbit_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.cam_high_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_plate_storage_rabbit_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.cam_high_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_box_up_down": { - "path": "RMC-AIDA-L_box_up_down", - "dataset_name": "box_up_down", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the bandage with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-31466", - "dataset_size": "383.7MB", - "statistics": { - "total_episodes": 159, - "total_frames": 31466, - "total_tasks": 2, - "total_videos": 477, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "e095217a-10c3-44e6-9a26-08a1758a1243", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the bandage with right gripper", - "Loosen the box on the table with right gripper", - "Place the box on the table with right gripper", - "Lift the box with left gripper", - "Place the box on the table with left gripper", - "Static", - "End", - "Loosen the box on the table with left gripper", - "Lift the box with right gripper", - "Grasp the bandage with left 
gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, 
Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_box_up_down_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_box_up_down_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_storage_ice_cream": { + "Airbot_MMK2_storage_book": { "task_categories": [ "robotics" ], @@ -22016,11 +23523,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_ice_cream", + "dataset_name": "Airbot_MMK2_storage_book", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -22028,17 +23535,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "ice_cream", - "level1": "snacks", - "level2": "ice_cream", + "object_name": "bookshelves", + "level1": "bookshelves", + "level2": "bookshelves", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "laboratory_supplies", - "level2": "storage_box", + "object_name": "book", + "level1": "stationery", + "level2": "book", "level3": null, "level4": null, "level5": null @@ -22046,40 +23553,44 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the ice cream into the storage box with left and right hands respectively." + "pick up the book with right hand and place it on the bookshelf." 
], "sub_tasks": [ { - "subtask": "Grasp the ice cream with the right gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Place the ice cream into the white basket with the right gripper", + "subtask": "Grasp the book with the right gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Place the book on the bookshelf with the left gripper", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Place the book on the bookshelf with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the ice cream into the white basket with the left gripper", + "subtask": "Push the book to the edge of the table with the right gripper", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Grasp the book with the left gripper", "subtask_index": 5 }, { - "subtask": "Grasp the ice cream with the left gripper", + "subtask": "Grasp the third book from the right with the left gripper", "subtask_index": 6 }, { - "subtask": "null", + "subtask": "Abnormal", "subtask_index": 7 + }, + { + "subtask": "null", + "subtask_index": 8 } ], "atomic_actions": [ @@ -22119,23 +23630,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 7639, + "total_episodes": 237, + "total_frames": 91429, "fps": 30, - "total_tasks": 8, - "total_videos": 188, + "total_tasks": 9, + "total_videos": 948, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "307.05 MB" + "dataset_size": "4.07 GB" }, - "frame_num": 7639, - "dataset_size": "307.05 MB", - "data_structure": "Airbot_MMK2_storage_ice_cream_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- 
episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 91429, + "dataset_size": "4.07 GB", + "data_structure": "Airbot_MMK2_storage_book_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(225 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:46" + "train": "0:236" }, "features": { "observation.images.cam_head_rgb": { @@ -22489,126 +24000,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "RMC-AIDA-L_get_water": { - "path": "RMC-AIDA-L_get_water", - "dataset_name": "get_water", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "hold", - "push" - ], - "tasks": "Move the cup beneath the water dispenser nozzle with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "faucet", - "level1": "tool", - "level2": "faucet", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cup", - "level1": "container", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "water", - "level1": "drink", - "level2": "water", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-327536", - "dataset_size": "2.8GB", - "statistics": { - "total_episodes": 333, - "total_frames": 327536, - "total_tasks": 3, - "total_videos": 999, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "2a2cc6a7-ac07-494e-9b05-0a437532bdb1", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Move the cup beneath the water dispenser nozzle with the right 
gripper", - "Close the dispenser valve with the right gripper", - "Open the dispenser valve and fill the cup with water with the right gripper", - "Close the dispenser valve with the left gripper", - "Open the dispenser valve and fill the cup with water with the left gripper", - "end", - "Move the cup beneath the water dispenser nozzle with the left gripper", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun 
Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_get_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_get_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│   ├── episode_000001.parquet\n│   ├── episode_000002.parquet\n│   ├── episode_000003.parquet\n│   ├── episode_000004.parquet\n│   └── (...)\n├── meta/\n│   ├── episodes.jsonl\n│   ├── episodes_stats.jsonl\n│   ├── info.json\n│   └── tasks.jsonl\n└── videos/\n    └── chunk-000/\n        ├── observation.images.cam_high_rgb/\n        │   ├── episode_000000.mp4\n        │   ├── episode_000001.mp4\n        │   ├── episode_000002.mp4\n        │   ├── episode_000003.mp4\n        │   ├── episode_000004.mp4\n        │   └── (...)\n        ├── observation.images.cam_left_wrist_rgb/\n        │   ├── episode_000000.mp4\n        │   ├── episode_000001.mp4\n        │   ├── episode_000002.mp4\n        │   ├── episode_000003.mp4\n        │   ├── episode_000004.mp4\n        │   └── (...)\n        └── observation.images.cam_right_wrist_rgb/\n            ├── episode_000000.mp4\n            ├── episode_000001.mp4\n            ├── episode_000002.mp4\n            ├── episode_000003.mp4\n            ├── episode_000004.mp4\n            └── (...)" - }, - "Galaxea_R1_Lite_storage_object_pink_bowl": { + "Galaxea_R1_Lite_pour_powder_marble_bar_counter": { "task_categories": [ "robotics" ], @@ -22638,11 +24030,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_pink_bowl", + "dataset_name": "Galaxea_R1_Lite_pour_powder_marble_bar_counter", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -22650,217 +24042,41 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "pink_bowl", - "level1": "plastic_bowl", - "level2": "pink_bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruits", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_chemical_products", - "level2": "bathing_in_flowers", + "object_name": "marble_bar_counter", + "level1":
"furniture", + "level2": "marble_bar_counter", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_cup", + "object_name": "plastic_cup", "level1": "cups", - "level2": "blue_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_pot", - "level1": "kitchen_supplies", - "level2": "blue_pot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coke", - "level1": "beverages", - "level2": "coke", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "compass", - "level1": "stationery", - "level2": "compass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block_pillar", - "level1": "toys", - "level2": "block_pillar", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "kitchen_supplies", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "stationery", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": 
"mentholatum_facial_cleanser", - "level1": "daily_necessities", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "appliances", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "stationery", - "level2": "tape", + "level2": "plastic_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "green_dish", + "level1": "plates", + "level2": "green_dish", "level3": null, "level4": null, "level5": null }, { - "object_name": "duck", - "level1": "doll", - "level2": "duck", + "object_name": "pink_bowl", + "level1": "plastic_bowls", + "level2": "pink_bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", + "object_name": "powder", + "level1": "materials", + "level2": "powder", "level3": null, "level4": null, "level5": null @@ -22868,477 +24084,230 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", 
"task_instruction": [ - "use a gripper to pick the target object and place on the pink bowl." + "use a gripper to pick up the cup and pour the powder into a bowl or tray." ], "sub_tasks": [ { - "subtask": "Place the bread slice on the pink bowl with the left gripper", + "subtask": "Pour the coffee powder into the pink bowl with right gripper", "subtask_index": 0 }, { - "subtask": "Place the round wooden block on the pink bowl with the left gripper", + "subtask": "Pour the powder into the pink bowl with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the blue pot with the left gripper", + "subtask": "Pick up blue cup filled with milk powder with left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the plugboard with the left gripper", + "subtask": "Pour the milk powder into the pink bowl with left gripper", "subtask_index": 3 }, { - "subtask": "Place the chocolate on the pink bowl with the right gripper", + "subtask": "Place the glass cup down with the right gripper", "subtask_index": 4 }, { - "subtask": "Grasp the potato chips with the right gripper", + "subtask": "Place blue cup with milk powder on the table with left gripper", "subtask_index": 5 }, { - "subtask": "Place the bread slice on the pink bowl with the right gripper", + "subtask": "Pour the powder into the pink bowl with the left gripper", "subtask_index": 6 }, { - "subtask": "Grasp the banana with the left gripper", + "subtask": "Pour the coffee powder into the pink bowl with left gripper", "subtask_index": 7 }, { - "subtask": "Place the round bread on the pink bowl with the right gripper", + "subtask": "Left gripper", "subtask_index": 8 }, { - "subtask": "Grasp the compasses with the right gripper", + "subtask": "Pick up blue cup filled with coffee powder with right gripper", "subtask_index": 9 }, { - "subtask": "Grasp the duck toy with the left gripper", + "subtask": "Pick up blue cup filled with milk powder with right gripper", "subtask_index": 10 }, { - "subtask": "Place the 
banana on the pink bowl with the left gripper", + "subtask": "Pick up blue cup filled with coffee powder with left gripper", "subtask_index": 11 }, { - "subtask": "Place the hard facial cleanser on the pink bowl with the right gripper", + "subtask": "Place blue cup with coffee powder on the table with right gripper", "subtask_index": 12 }, { - "subtask": "Grasp the blue cup with the left gripper", + "subtask": "Pour the powder into the green bowl with the left gripper", "subtask_index": 13 }, { - "subtask": "Place the brown towel on the pink bowl with the right gripper", + "subtask": "Pour the coffee powder into the blue basin with right gripper", "subtask_index": 14 }, { - "subtask": "Place the duck toy on the pink bowl with the right gripper", + "subtask": "Pour the coffee powder into the blue basin with left gripper", "subtask_index": 15 }, { - "subtask": "Place the coke on the pink bowl with the right gripper", + "subtask": "Grasp the glass of powder with the right gripper", "subtask_index": 16 }, { - "subtask": "Grasp the square chewing gum with the left gripper", + "subtask": "Place the glass cup down with the left gripper", "subtask_index": 17 }, { - "subtask": "Grasp the chocolate cake with the right gripper", + "subtask": "Pour the milk powder into the pink bowl with right gripper", "subtask_index": 18 }, { - "subtask": "Place the banana on the pink bowl with the right gripper", + "subtask": "Pour the powder into the green bowl with the right gripper", "subtask_index": 19 }, { - "subtask": "Grasp the shower sphere with the left gripper", + "subtask": "Pour the milk powder into the blue basin with left gripper", "subtask_index": 20 }, { - "subtask": "Grasp the plugboard with the right gripper", + "subtask": "Place blue cup with milk powder on the table with right gripper", "subtask_index": 21 }, { - "subtask": "Grasp the yogurt with the right gripper", + "subtask": "Place blue cup with coffee powder on the table with left gripper", "subtask_index": 22 }, { 
- "subtask": "Grasp the tin with the left gripper", + "subtask": "Pour the milk powder into the blue basin with right gripper", "subtask_index": 23 }, { - "subtask": "Grasp the brown towel with the left gripper", + "subtask": "End", "subtask_index": 24 }, { - "subtask": "Place the peach on the pink bowl with the left gripper", + "subtask": "Grasp the glass of powder with the left gripper", "subtask_index": 25 }, { - "subtask": "Grasp the chocolate with the right gripper", + "subtask": "Right gripper", "subtask_index": 26 }, { - "subtask": "Place the tape on the pink bowl with the right gripper", + "subtask": "null", "subtask_index": 27 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 100, + 
"total_frames": 39829, + "fps": 30, + "total_tasks": 28, + "total_videos": 400, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "1.58 GB" + }, + "frame_num": 39829, + "dataset_size": "1.58 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_powder_marble_bar_counter_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:99" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } }, - { - "subtask": "Grasp the peach with the right gripper", - "subtask_index": 28 - }, - { - "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Grasp the back scratcher with the left gripper", - "subtask_index": 30 - }, - { - "subtask": "Place the round wooden block on the pink bowl with the right gripper", - "subtask_index": 31 - }, - { - "subtask": "Place the tape on the pink bowl with the left gripper", - "subtask_index": 32 - }, - { - "subtask": "Place the duck toy on the pink bowl with the left gripper", - "subtask_index": 33 - }, - { - "subtask": "Place the shower sphere on the pink bowl with the left gripper", - "subtask_index": 34 - }, - { - "subtask": "Place the blue cup on the pink bowl with the left gripper", - "subtask_index": 35 - }, - { - "subtask": "Grasp the hard facial cleanser with the right gripper", - "subtask_index": 36 - }, - { - "subtask": "Place the blue pot on the pink bowl with the left gripper", - "subtask_index": 37 - }, - { - "subtask": "Place the coke on the pink bowl with the left gripper", - "subtask_index": 38 - }, - { - "subtask": "Place the blue cup on the pink bowl with the right gripper", - "subtask_index": 39 - }, - 
{ - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 40 - }, - { - "subtask": "Place the potato chips on the pink bowl with the right gripper", - "subtask_index": 41 - }, - { - "subtask": "Place the square chewing gum on the pink bowl with the left gripper", - "subtask_index": 42 - }, - { - "subtask": "Place the square wooden block on the pink bowl with the right gripper", - "subtask_index": 43 - }, - { - "subtask": "Place the blackboard erasure on the pink bowl with the left gripper", - "subtask_index": 44 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 45 - }, - { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 46 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 47 - }, - { - "subtask": "End", - "subtask_index": 48 - }, - { - "subtask": "Place the compasses on the pink bowl with the left gripper", - "subtask_index": 49 - }, - { - "subtask": "Place the blue pot on the pink bowl with the right gripper", - "subtask_index": 50 - }, - { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 51 - }, - { - "subtask": "Grasp the coke with the left gripper", - "subtask_index": 52 - }, - { - "subtask": "Place the chocolate cake on the pink bowl with the right gripper", - "subtask_index": 53 - }, - { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 54 - }, - { - "subtask": "Place the blackboard erasure on the pink bowl with the right gripper", - "subtask_index": 55 - }, - { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 56 - }, - { - "subtask": "Place the peach on the pink bowl with the right gripper", - "subtask_index": 57 - }, - { - "subtask": "Place the soft facial cleanser on the pink bowl with the right gripper", - "subtask_index": 58 - }, - { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 59 - }, - { - "subtask": 
"Grasp the round bread with the right gripper", - "subtask_index": 60 - }, - { - "subtask": "Place the plugboard on the pink bowl with the left gripper", - "subtask_index": 61 - }, - { - "subtask": "Grasp the tin with the right gripper", - "subtask_index": 62 - }, - { - "subtask": "Place the square chewing gum on the pink bowl with the right gripper", - "subtask_index": 63 - }, - { - "subtask": "Place the compasses on the pink bowl with the right gripper", - "subtask_index": 64 - }, - { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 65 - }, - { - "subtask": "Place the shower sphere on the pink bowl with the right gripper", - "subtask_index": 66 - }, - { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 67 - }, - { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 68 - }, - { - "subtask": "Place the tin on the pink bowl with the right gripper", - "subtask_index": 69 - }, - { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 70 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 71 - }, - { - "subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 72 - }, - { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 73 - }, - { - "subtask": "Place the brown towel on the pink bowl with the left gripper", - "subtask_index": 74 - }, - { - "subtask": "Place the square wooden block on the pink bowl with the left gripper", - "subtask_index": 75 - }, - { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 76 - }, - { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 77 - }, - { - "subtask": "Place the back scratcher on the pink bowl with the left gripper", - "subtask_index": 78 - }, - { - "subtask": "Grasp the soft facial cleanser with the right gripper", - "subtask_index": 79 - }, - { - "subtask": "Place the green 
lemon on the pink bowl with the left gripper", - "subtask_index": 80 - }, - { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 81 - }, - { - "subtask": "Grasp the green lemon with the left gripper", - "subtask_index": 82 - }, - { - "subtask": "Place the tin on the pink bowl with the left gripper", - "subtask_index": 83 - }, - { - "subtask": "Place the yogurt on the pink bowl with the right gripper", - "subtask_index": 84 - }, - { - "subtask": "Place the potato chips on the pink bowl with the left gripper", - "subtask_index": 85 - }, - { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 86 - }, - { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 87 - }, - { - "subtask": "Place the plugboard on the pink bowl with the right gripper", - "subtask_index": 88 - }, - { - "subtask": "null", - "subtask_index": 89 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Galaxea_R1_Lite" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - 
"eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 102, - "total_frames": 20095, - "fps": 30, - "total_tasks": 90, - "total_videos": 408, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "752.15 MB" - }, - "frame_num": 20095, - "dataset_size": "752.15 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_pink_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:101" - }, - "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } }, "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 360, + 640, 3 ], "names": [ @@ -23347,8 +24316,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 360, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -23360,8 +24329,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 360, + 640, 3 ], "names": [ @@ 
-23370,8 +24339,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 360, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -23655,12 +24624,12 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "leju_robot_hotel_services_ab": { - "path": "leju_robot_hotel_services_ab", - "dataset_name": "hotel_services_ab", + "Split_aloha_plate_storage": { + "path": "Split_aloha_plate_storage", + "dataset_name": "plate_storage", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -23668,7 +24637,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Pass the plate from the left gripper to the right gripper", "objects": [ { "object_name": "table", @@ -23679,43 +24648,35 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": "card", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", + "object_name": "plate_rack", + "level1": "furniture", + "level2": "plate_rack", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-81123", - "dataset_size": "5.2GB", + "operation_platform_height": 77.2, + "frame_range": "0-229687", + "dataset_size": "2.6GB", "statistics": { - "total_episodes": 450, - "total_frames": 81123, - "total_tasks": 1, - "total_videos": 1350, + "total_episodes": 499, + "total_frames": 229687, + "total_tasks": 5, + "total_videos": 1497, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": 
"e7a6fa76-d4ee-4f1a-b951-26d50fb71f0d", + "dataset_uuid": "7f2dd151-758c-4265-89b3-5d486f875ef8", "language": [ "en", "zh" @@ -23724,11 +24685,11 @@ "robotics" ], "sub_tasks": [ - "End", - "Take out the room card with right gripper", - "Hand the room card to the guest with right gripper", - "Hand the room card to the target.", - "Pick up the room card from the card holder.", + "Pass the plate from the left gripper to the right gripper", + "Place the plate on the shelf", + "Pass the plate from the right gripper to the left gripper", + "Pick up the plate with the left gripper", + "Pick up the plate with the right gripper", "null" ], "annotations": { @@ -23766,23 +24727,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - 
"data_schema": "leju_robot_hotel_services_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Split_aloha_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_cup_storage": { - "path": "AIRBOT_MMK2_cup_storage", - "dataset_name": "cup_storage", + "AgiBot-g1_box_storage_cardboard_box_a": { + "path": "AgiBot-g1_box_storage_cardboard_box_a", + "dataset_name": "box_storage_cardboard_box_a", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", + "place", "pick", - "place" + "grasp" ], - "tasks": "Place the blue cup on the plate with the left gripper", + "tasks": "Place the mouse and the power cord paper box into the container.", "objects": [ { "object_name": "table", @@ -23793,35 +24754,35 @@ "level5": null }, { - "object_name": "cup", - "level1": "container", - "level2": "cup", + "object_name": "paper_box", + "level1": "tool", + "level2": "paper_box", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "carton", + "level1": "tool", + "level2": "carton", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-12246", - "dataset_size": "522.9MB", + "operation_platform_height": null, + "frame_range": "0-188369", + "dataset_size": "87.9GB", "statistics": { - 
"total_episodes": 48, - "total_frames": 12246, + "total_episodes": 402, + "total_frames": 188369, "total_tasks": 1, - "total_videos": 192, + "total_videos": 3216, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "4f813d8b-217d-4c36-9698-e2c22c0696ae", + "dataset_uuid": "15be5a22-893f-4c6d-8562-afa106f6846f", "language": [ "en", "zh" @@ -23830,12 +24791,8 @@ "robotics" ], "sub_tasks": [ - "Place the blue cup on the plate with the left gripper", - "Place the purple cup on the blue cup with the right gripper", - "Abnormal", - "End", - "Grasp the purple cup with the right gripper", - "Grasp the blue cup with the left gripper", + "Place the mouse and the power cord paper box into the container.", + "Pick up the mouse and the power cord paper box.", "null" ], "annotations": { @@ -23873,12 +24830,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and 
Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_cup_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_cup_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_box_storage_cardboard_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_cardboard_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_twist_bottle_cap": { - "path": "Cobot_Magic_twist_bottle_cap", - "dataset_name": "twist_bottle_cap", + "R1_Lite_switch_on_and_off_the_central_air_conditioning": { + "path": "R1_Lite_switch_on_and_off_the_central_air_conditioning", + "dataset_name": "switch_on_and_off_the_central_air_conditioning", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -23887,81 +24844,235 @@ "atomic_actions": [ "grasp", "pick", - "place", - "twist" + "place" ], - "tasks": "Place the Yili Changqing with the left hand", + "tasks": "Turn off the central air conditioner", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - 
"object_name": "bottle", - "level1": "container", - "level2": "bottle", + "object_name": "central_air_conditioning", + "level1": "household_appliances", + "level2": "central_air_conditioning", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": null, + "frame_range": "0-19688", + "dataset_size": "793.3MB", + "statistics": { + "total_episodes": 35, + "total_frames": 19688, + "total_tasks": 1, + "total_videos": 105, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "6d6d5868-e715-41cc-a7df-972218e02479", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Turn off the central air conditioner", + "Press the temperature decrease button", + "Press the temperature increase button", + "Turn on the central air conditioner", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang 
Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_switch_on_and_off_the_central_air_conditioning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
└── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_switch_on_and_off_the_central_air_conditioning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_plate_storage_rabbit_doll": { + "path": "G1edu-u3_plate_storage_rabbit_doll", + "dataset_name": "plate_storage_rabbit_doll", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the rabbit doll into the plate with the left gripper", + "objects": [ { - "object_name": "master_kong_oolong_tea", - "level1": "drink", - "level2": "master_kong_oolong_tea", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": 
"green_tea", - "level1": "drink", - "level2": "green_tea", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "meinianda", - "level1": "drink", - "level2": "meinianda", + "object_name": "rabbit_doll", + "level1": "toy", + "level2": "rabbit_doll", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-76754", + "dataset_size": "378.4MB", + "statistics": { + "total_episodes": 227, + "total_frames": 76754, + "total_tasks": 1, + "total_videos": 227, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "adde8f5b-b32a-49e8-b684-9f89a37ec8e4", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the rabbit doll into the plate with the left gripper", + "Grasp the rabbit doll with the left gripper", + "End", + "Static", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, 
Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_plate_storage_rabbit_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.cam_high_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_plate_storage_rabbit_doll_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.cam_high_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_box_up_down": { + "path": "RMC-AIDA-L_box_up_down", + "dataset_name": "box_up_down", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the bandage with right gripper", + "objects": [ { - "object_name": "yili_changqing", - "level1": "drink", - "level2": "yili_changqing", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "cestbon", - "level1": "drink", - "level2": "cestbon", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-749388", - "dataset_size": "8.7GB", + "frame_range": "0-31466", + "dataset_size": "383.7MB", "statistics": { - "total_episodes": 863, - "total_frames": 749388, - "total_tasks": 9, - "total_videos": 2589, + "total_episodes": 159, + "total_frames": 31466, + "total_tasks": 2, + "total_videos": 477, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "c084dcfc-e7a8-4e35-a194-0325296dcd6e", + "dataset_uuid": "e095217a-10c3-44e6-9a26-08a1758a1243", "language": [ "en", "zh" @@ -23970,55 +25081,17 @@ "robotics" 
], "sub_tasks": [ - "Place the Yili Changqing with the left hand", - "Lift the Peach Oolong Tea with the left hand", - "Place the bottle cap on the table with the right hand", - "Move the Peach Oolong Tea from right to left", - "Move from left to right", - "Move the Peach Oolong Tea from left to right", - "Secure the C'estbon with the left hand", - "Move the Mirinda to the right with the left hand", - "Place the bottle on the table with the left gripper", - "Grasp the bottle and move it to the center of the table with the left gripper", - "Move the Mirinda from left to right", - "Move the Mirinda from top to bottom", - "Discard", - "Place the bottle cap on the table", + "Grasp the bandage with right gripper", + "Loosen the box on the table with right gripper", + "Place the box on the table with right gripper", + "Lift the box with left gripper", + "Place the box on the table with left gripper", + "Static", "End", - "Place the C'estbon with the left hand", - "Move the Mirinda from right to left", - "Move the green tea to the right with the left hand", - "Lift the bottle and move it to the center of the table with the left gripper", - "Grab the green tea with the left hand", - "Secure the Peach Oolong Tea with the left hand", - "Place the Mirinda with the left hand", - "Fix the Yili Changqing on the left", - "Secure the green tea with the left hand", - "Secure the Yili Changqing with the left hand", - "Move the Yili Changqing from left to right", + "Loosen the box on the table with left gripper", + "Lift the box with right gripper", + "Grasp the bandage with left gripper", "Abnormal", - "Place the bottle cap on the right", - "Place the green tea with the left hand", - "Move the Yili Changqing to the right with the left hand", - "Lift the green tea with the left hand", - "Grab the C'estbon with the left hand", - "Move from right to left", - "Unscrew the bottle cap with the right gripper while holding the bottle with the left gripper", - "Secure the Mirinda with the 
left hand", - "Grab the Yili Changqing with the left hand", - "Lift the Yili Changqing with the left hand", - "Move the C'estbon to the right with the left hand", - "Place the Peach Oolong Tea with the left hand", - "Move the Yili Changqing from right to left", - "Fix the Mirinda on the left", - "Move the Yili Changqing from bottom to top", - "Grab the Peach Oolong Tea with the left hand", - "Move the Peach Oolong Tea to the right with the left hand", - "Move the Yili Changqing from top to bottom", - "Grab the Mirinda with the left hand", - "Twist the bottle cap with the right hand", - "Lift the Mirinda with the left hand", - "Place the bottle cap on the table with the right gripper", "null" ], "annotations": { @@ -24056,10 +25129,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_twist_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_twist_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ 
├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "RMC-AIDA-L_box_up_down_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_box_up_down_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_storage_orange_white_bag": { + "Airbot_MMK2_storage_ice_cream": { "task_categories": [ "robotics" ], @@ -24089,11 +25162,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_orange_white_bag", + "dataset_name": "Airbot_MMK2_storage_ice_cream", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -24101,25 +25174,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_basket", - "level1": "home_storage", - "level2": "brown_basket", + "object_name": "ice_cream", + "level1": "snacks", + "level2": "ice_cream", "level3": null, "level4": null, "level5": null }, { - "object_name": "white_canvas_bags", - "level1": "daily_necessities", - "level2": "white_canvas_bags", + "object_name": "storage_box", + "level1": "laboratory_supplies", + "level2": "storage_box", "level3": null, "level4": null, "level5": 
null @@ -24127,35 +25192,35 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "open the white linen bag and put the orange in the bag." + "put the ice cream into the storage box with left and right hands respectively." ], "sub_tasks": [ { - "subtask": "Grasp the orange with right gripper", + "subtask": "Grasp the ice cream with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the orange with left gripper", + "subtask": "Place the ice cream into the white basket with the right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Lift the handbag with left gripper", + "subtask": "Static", "subtask_index": 3 }, { - "subtask": "Place the orange in the handbag with right gripper", + "subtask": "Place the ice cream into the white basket with the left gripper", "subtask_index": 4 }, { - "subtask": "Place the orange in the basket with right gripper", + "subtask": "End", "subtask_index": 5 }, { - "subtask": "Put down the handbag with left gripper", + "subtask": "Grasp the ice cream with the left gripper", "subtask_index": 6 }, { @@ -24165,29 +25230,31 @@ ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, 
codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -24198,23 +25265,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 100, - "total_frames": 43104, + "total_episodes": 47, + "total_frames": 7639, "fps": 30, "total_tasks": 8, - "total_videos": 300, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "406.02 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "307.05 MB" }, - "frame_num": 43104, - "dataset_size": "406.02 MB", - "data_structure": "Agilex_Cobot_Magic_storage_orange_white_bag_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 7639, + "dataset_size": "307.05 MB", + "data_structure": "Airbot_MMK2_storage_ice_cream_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:99" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -24286,10 +25353,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -24298,32 +25388,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -24332,26 +25432,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -24391,17 +25501,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -24418,10 +25528,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + 
"dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -24438,130 +25548,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", "shape": [ 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -24583,9 +25633,128 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_classify_objects_eight": { + "RMC-AIDA-L_get_water": { + "path": "RMC-AIDA-L_get_water", + "dataset_name": "get_water", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "hold", + "push" + ], + "tasks": "Move the cup beneath the water dispenser nozzle with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "faucet", + "level1": "tool", + "level2": "faucet", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water", + "level1": "drink", + "level2": "water", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-327536", + "dataset_size": "2.8GB", + "statistics": { + "total_episodes": 333, + "total_frames": 327536, + "total_tasks": 3, + "total_videos": 999, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "2a2cc6a7-ac07-494e-9b05-0a437532bdb1", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Move the cup beneath the water dispenser nozzle with the right gripper", + "Close the dispenser valve with the right gripper", + "Open the dispenser valve and fill the cup with water with the right gripper", + "Close the dispenser valve with the left gripper", + "Open the dispenser valve and fill 
the cup with water with the left gripper", + "end", + "Move the cup beneath the water dispenser nozzle with the left gripper", + "abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, 
Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_get_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_get_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_storage_object_pink_bowl": { "task_categories": [ "robotics" ], @@ -24615,11 +25784,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_classify_objects_eight", + "dataset_name": "Galaxea_R1_Lite_storage_object_pink_bowl", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -24627,65 +25796,129 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "pink_bowl", + "level1": "plastic_bowl", + "level2": "pink_bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "brown_basket", - "level1": "home_storage", - "level2": "brown_basket", + "object_name": "banana", + "level1": "fruits", + "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "black_basket", - "level1": "food", - "level2": "black_basket", + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", "level3": null, "level4": null, "level5": null }, { - "object_name": "bread", - "level1": "food", - "level2": "bread", + "object_name": "blue_cup", + "level1": 
"cups", + "level2": "blue_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "orange", - "level1": "food", - "level2": "orange", + "object_name": "blue_pot", + "level1": "kitchen_supplies", + "level2": "blue_pot", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_lemon", - "level1": "food", - "level2": "green_lemon", + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", "level3": null, "level4": null, "level5": null }, { - "object_name": "pink_clear_plastic_cup", + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke", + "level1": "beverages", + "level2": "coke", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "toys", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", "level1": "kitchen_supplies", - "level2": "pink_clear_plastic_cup", + "level2": "egg_beater", "level3": null, "level4": null, "level5": null }, { - "object_name": "laundry_detergent", - "level1": "daily_necessities", - "level2": "laundry_detergent", + "object_name": "eraser", + "level1": "stationery", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": 
"chewing_gum", "level3": null, "level4": null, "level5": null @@ -24693,87 +25926,87 @@ { "object_name": "mentholatum_facial_cleanser", "level1": "daily_necessities", - "level2": "Mmentholatum_facial_cleanser", + "level2": "mentholatum_facial_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "lemon", - "level1": "food", - "level2": "lemon", + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", "level3": null, "level4": null, "level5": null }, { - "object_name": "apple", - "level1": "food", - "level2": "apple", + "object_name": "peach", + "level1": "fruits", + "level2": "peach", "level3": null, "level4": null, "level5": null }, { - "object_name": "mango", - "level1": "food", - "level2": "mango", + "object_name": "power_strip", + "level1": "appliances", + "level2": "power_strip", "level3": null, "level4": null, "level5": null }, { - "object_name": "kiwi_fruit", - "level1": "food", - "level2": "kiwi_fruit", + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "mangosteen", - "level1": "food", - "level2": "mangosteen", + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "pear", - "level1": "food", - "level2": "pear", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "avocado", - "level1": "food", - "level2": "avocado", + "object_name": "tape", + "level1": "stationery", + "level2": "tape", "level3": null, "level4": null, "level5": null }, { - "object_name": "brown_clear_plastic_cup", - "level1": "kitchen_supplies", - "level2": "brown_clear_plastic_cup", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, 
"level5": null }, { - "object_name": "shampoo", - "level1": "daily_necessities", - "level2": "shampoo", + "object_name": "duck", + "level1": "doll", + "level2": "duck", "level3": null, "level4": null, "level5": null }, { - "object_name": "deli Water-based_Marker", - "level1": "stationery", - "level2": "deli_Water-based_Marker", + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null @@ -24781,415 +26014,397 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "Place multiple objects separately in different baskets." + "use a gripper to pick the target object and place on the pink bowl." ], "sub_tasks": [ { - "subtask": "Place the pear in the light basket with right gripper", + "subtask": "Place the bread slice on the pink bowl with the left gripper", "subtask_index": 0 }, { - "subtask": "Pick up the shampoo with left gripper", + "subtask": "Place the round wooden block on the pink bowl with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the pink marker pen in the dark basket with right gripper", + "subtask": "Grasp the blue pot with the left gripper", "subtask_index": 2 }, { - "subtask": "Pick up the pear with left gripper", + "subtask": "Grasp the plugboard with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the lime in the light basket with right gripper", + "subtask": "Place the chocolate on the pink bowl with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the laundry detergent in the dark basket with right gripper", + "subtask": "Grasp the potato chips with the right gripper", "subtask_index": 5 }, { - "subtask": "Place the mango in the light basket with right gripper", + "subtask": "Place the bread slice on the pink bowl with the right gripper", "subtask_index": 6 }, { - "subtask": "Pick up the light brown cup with left gripper", + 
"subtask": "Grasp the banana with the left gripper", "subtask_index": 7 }, { - "subtask": "Place the orange in the light basket with right gripper", + "subtask": "Place the round bread on the pink bowl with the right gripper", "subtask_index": 8 }, { - "subtask": "Pick up the dark brown cup with left gripper", + "subtask": "Grasp the compasses with the right gripper", "subtask_index": 9 }, { - "subtask": "Place the dark brown cup in the dark basket with left gripper", + "subtask": "Grasp the duck toy with the left gripper", "subtask_index": 10 }, { - "subtask": "Pick up the light brown cup with right gripper", + "subtask": "Place the banana on the pink bowl with the left gripper", "subtask_index": 11 }, { - "subtask": "Pick up pink marker pen with right gripper", + "subtask": "Place the hard facial cleanser on the pink bowl with the right gripper", "subtask_index": 12 }, { - "subtask": "Pick up the lime with left gripper", + "subtask": "Grasp the blue cup with the left gripper", "subtask_index": 13 }, { - "subtask": "Pick up kiwi with left gripper", + "subtask": "Place the brown towel on the pink bowl with the right gripper", "subtask_index": 14 }, { - "subtask": "Pick up mango with left gripper", + "subtask": "Place the duck toy on the pink bowl with the right gripper", "subtask_index": 15 }, { - "subtask": "Place the shampoo in the dark basket with right gripper", + "subtask": "Place the coke on the pink bowl with the right gripper", "subtask_index": 16 }, { - "subtask": "Place the dark brown cup in the dark basket with right gripper", + "subtask": "Grasp the square chewing gum with the left gripper", "subtask_index": 17 }, { - "subtask": "Pick up the pomegranate with right gripper", + "subtask": "Grasp the chocolate cake with the right gripper", "subtask_index": 18 }, { - "subtask": "Pick up the pear with right gripper", + "subtask": "Place the banana on the pink bowl with the right gripper", "subtask_index": 19 }, { - "subtask": "Pick up the dark brown cup with 
right gripper", + "subtask": "Grasp the shower sphere with the left gripper", "subtask_index": 20 }, { - "subtask": "Pick up the red cup with right gripper", + "subtask": "Grasp the plugboard with the right gripper", "subtask_index": 21 }, { - "subtask": "Place the pomegranate in the light basket with left gripper", + "subtask": "Grasp the yogurt with the right gripper", "subtask_index": 22 }, { - "subtask": "Pick up the avocado with right gripper", + "subtask": "Grasp the tin with the left gripper", "subtask_index": 23 }, { - "subtask": "Pick up the hard facial cleanser with left gripper", + "subtask": "Grasp the brown towel with the left gripper", "subtask_index": 24 }, { - "subtask": "Place the Incense box in the dark basket with left gripper", + "subtask": "Place the peach on the pink bowl with the left gripper", "subtask_index": 25 }, { - "subtask": "Pick up the shampoo with right gripper", + "subtask": "Grasp the chocolate with the right gripper", "subtask_index": 26 }, { - "subtask": "Place the banana in the light basket with right gripper", + "subtask": "Place the tape on the pink bowl with the right gripper", "subtask_index": 27 }, { - "subtask": "Place the laundry detergent in the dark basket with left gripper", + "subtask": "Grasp the peach with the right gripper", "subtask_index": 28 }, { - "subtask": "Pick up the orange with right gripper", + "subtask": "Grasp the brown towel with the right gripper", "subtask_index": 29 }, { - "subtask": "Place the mangosteen in the light basket with right gripper", + "subtask": "Grasp the back scratcher with the left gripper", "subtask_index": 30 }, { - "subtask": "Place the hard facial cleanser in the dark basket with right gripper", + "subtask": "Place the round wooden block on the pink bowl with the right gripper", "subtask_index": 31 }, { - "subtask": "Pick up Incense box with right gripper", + "subtask": "Place the tape on the pink bowl with the left gripper", "subtask_index": 32 }, { - "subtask": "Place the red 
marker in the dark basket with right gripper", + "subtask": "Place the duck toy on the pink bowl with the left gripper", "subtask_index": 33 }, { - "subtask": "Pick up the lemon with left gripper", + "subtask": "Place the shower sphere on the pink bowl with the left gripper", "subtask_index": 34 }, { - "subtask": "Place the banana in the light basket with left gripper", + "subtask": "Place the blue cup on the pink bowl with the left gripper", "subtask_index": 35 }, { - "subtask": "Pick up Incense box with left gripper", + "subtask": "Grasp the hard facial cleanser with the right gripper", "subtask_index": 36 }, { - "subtask": "Pick up kiwi with right gripper", + "subtask": "Place the blue pot on the pink bowl with the left gripper", "subtask_index": 37 }, { - "subtask": "Pick up the mango with right gripper", + "subtask": "Place the coke on the pink bowl with the left gripper", "subtask_index": 38 }, { - "subtask": "Place the lemon in the light basket with left gripper", + "subtask": "Place the blue cup on the pink bowl with the right gripper", "subtask_index": 39 }, { - "subtask": "Pick up the laundry detergent with left gripper", + "subtask": "Grasp the peach with the left gripper", "subtask_index": 40 }, { - "subtask": "Pick up toothpaste with right gripper", + "subtask": "Place the potato chips on the pink bowl with the right gripper", "subtask_index": 41 }, { - "subtask": "Pick up the bread with right gripper", + "subtask": "Place the square chewing gum on the pink bowl with the left gripper", "subtask_index": 42 }, { - "subtask": "Pick up toothpaste with left gripper", + "subtask": "Place the square wooden block on the pink bowl with the right gripper", "subtask_index": 43 }, { - "subtask": "Place the red cup in the dark basket with left gripper", + "subtask": "Place the blackboard erasure on the pink bowl with the left gripper", "subtask_index": 44 }, { - "subtask": "Place the red cup in the dark basket with right gripper", + "subtask": "Grasp the bread 
slice with the right gripper", "subtask_index": 45 }, { - "subtask": "Place the lemon in the light basket with right gripper", + "subtask": "Grasp the potato chips with the left gripper", "subtask_index": 46 }, { - "subtask": "Pick up banana with left gripper", + "subtask": "Grasp the duck toy with the right gripper", "subtask_index": 47 }, { - "subtask": "Place the avocado in the light basket with right gripper", + "subtask": "End", "subtask_index": 48 }, { - "subtask": "Pick up egg yolk pastry with left gripper", + "subtask": "Place the compasses on the pink bowl with the left gripper", "subtask_index": 49 }, { - "subtask": "Place the light brown cup in the dark basket with left gripper", + "subtask": "Place the blue pot on the pink bowl with the right gripper", "subtask_index": 50 }, { - "subtask": "Pick up egg yolk pastry with right gripper", + "subtask": "Grasp the blackboard erasure with the left gripper", "subtask_index": 51 }, { - "subtask": "Place the orange in the light basket with left gripper", + "subtask": "Grasp the coke with the left gripper", "subtask_index": 52 }, { - "subtask": "Place the hard facial cleanser in the dark basket with left gripper", + "subtask": "Place the chocolate cake on the pink bowl with the right gripper", "subtask_index": 53 }, { - "subtask": "Pick up the lemon with right gripper", + "subtask": "Grasp the round wooden block with the left gripper", "subtask_index": 54 }, { - "subtask": "Place the kiwi in the light basket with right gripper", + "subtask": "Place the blackboard erasure on the pink bowl with the right gripper", "subtask_index": 55 }, { - "subtask": "End", + "subtask": "Grasp the compasses with the left gripper", "subtask_index": 56 }, { - "subtask": "Pick up gray cup with left gripper", + "subtask": "Place the peach on the pink bowl with the right gripper", "subtask_index": 57 }, { - "subtask": "Place the pomegranate in the light basket with right gripper", + "subtask": "Place the soft facial cleanser on the pink 
bowl with the right gripper", "subtask_index": 58 }, { - "subtask": "Pick up banana with right gripper", + "subtask": "Grasp the blue pot with the right gripper", "subtask_index": 59 }, { - "subtask": "Place the blackboard eraser in the dark basket with left gripper", + "subtask": "Grasp the round bread with the right gripper", "subtask_index": 60 }, { - "subtask": "Pick up the red marker with right gripper", + "subtask": "Place the plugboard on the pink bowl with the left gripper", "subtask_index": 61 }, { - "subtask": "Place the red marker in the dark basket with left gripper", + "subtask": "Grasp the tin with the right gripper", "subtask_index": 62 }, { - "subtask": "Place the gray cup in the dark basket with left gripper", + "subtask": "Place the square chewing gum on the pink bowl with the right gripper", "subtask_index": 63 }, { - "subtask": "Pick up the red cup with left gripper", + "subtask": "Place the compasses on the pink bowl with the right gripper", "subtask_index": 64 }, { - "subtask": "Place the kiwi in the light basket with left gripper", + "subtask": "Grasp the tape with the right gripper", "subtask_index": 65 }, { - "subtask": "Pick up the mangosteen with left gripper", + "subtask": "Place the shower sphere on the pink bowl with the right gripper", "subtask_index": 66 }, { - "subtask": "Pick up the orange with left gripper", + "subtask": "Grasp the coke with the right gripper", "subtask_index": 67 }, { - "subtask": "Place the mangosteen in the light basket with left gripper", + "subtask": "Grasp the tape with the left gripper", "subtask_index": 68 }, { - "subtask": "Place the bread in the light basket with right gripper", + "subtask": "Place the tin on the pink bowl with the right gripper", "subtask_index": 69 }, { - "subtask": "Place the toothpaste in the dark basket with right gripper", + "subtask": "Grasp the square chewing gum with the right gripper", "subtask_index": 70 }, { - "subtask": "Pick up the lime with right gripper", + "subtask": 
"Grasp the shower sphere with the right gripper", "subtask_index": 71 }, { - "subtask": "Place the mango in the light basket with left gripper", + "subtask": "Grasp the bread slice with the left gripper", "subtask_index": 72 }, { - "subtask": "Pick up the mangosteen with right gripper", + "subtask": "Grasp the blackboard erasure with the right gripper", "subtask_index": 73 }, { - "subtask": "Place the pink marker pen in the dark basket with left gripper", + "subtask": "Place the brown towel on the pink bowl with the left gripper", "subtask_index": 74 }, { - "subtask": "Pick up the red marker with left gripper", + "subtask": "Place the square wooden block on the pink bowl with the left gripper", "subtask_index": 75 }, { - "subtask": "Place the pear in the light basket with left gripper", + "subtask": "Grasp the square wooden block with the left gripper", "subtask_index": 76 }, { - "subtask": "Pick up blackboard eraser with right gripper", + "subtask": "Grasp the blue cup with the right gripper", "subtask_index": 77 }, { - "subtask": "Place the blackboard eraser in the dark basket with right gripper", + "subtask": "Place the back scratcher on the pink bowl with the left gripper", "subtask_index": 78 }, { - "subtask": "Place the shampoo in the dark basket with left gripper", + "subtask": "Grasp the soft facial cleanser with the right gripper", "subtask_index": 79 }, { - "subtask": "Place the egg yolk pastry in the light basket with left gripper", + "subtask": "Place the green lemon on the pink bowl with the left gripper", "subtask_index": 80 }, { - "subtask": "Pick up the pomegranate with left gripper", + "subtask": "Grasp the square wooden block with the right gripper", "subtask_index": 81 }, { - "subtask": "Place the toothpaste in the dark basket with left gripper", + "subtask": "Grasp the green lemon with the left gripper", "subtask_index": 82 }, { - "subtask": "Pick up blackboard eraser with left gripper", + "subtask": "Place the tin on the pink bowl with the left 
gripper", "subtask_index": 83 }, { - "subtask": "Place the Incense box in the dark basket with right gripper", + "subtask": "Place the yogurt on the pink bowl with the right gripper", "subtask_index": 84 }, { - "subtask": "Pick up the hard facial cleanser with right gripper", + "subtask": "Place the potato chips on the pink bowl with the left gripper", "subtask_index": 85 }, { - "subtask": "Place the apple in the light basket with right gripper", + "subtask": "Grasp the round wooden block with the right gripper", "subtask_index": 86 }, { - "subtask": "Place the avocado in the light basket with left gripper", + "subtask": "Grasp the banana with the right gripper", "subtask_index": 87 }, { - "subtask": "Pick up pink marker pen with left gripper", + "subtask": "Place the plugboard on the pink bowl with the right gripper", "subtask_index": 88 }, - { - "subtask": "Pick up the avocado with left gripper", - "subtask_index": 89 - }, - { - "subtask": "Pick up the laundry detergent with right gripper", - "subtask_index": 90 - }, - { - "subtask": "Place the egg yolk pastry in the light basket with right gripper", - "subtask_index": 91 - }, - { - "subtask": "Pick up the apple with right gripper", - "subtask_index": 92 - }, - { - "subtask": "Place the lime in the light basket with left gripper", - "subtask_index": 93 - }, { "subtask": "null", - "subtask_index": 94 + "subtask_index": 89 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -25200,30 +26415,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 197, - "total_frames": 337837, + "total_episodes": 102, + "total_frames": 20095, "fps": 30, - "total_tasks": 95, - "total_videos": 591, + "total_tasks": 90, + "total_videos": 408, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "4.51 GB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "752.15 MB" }, - "frame_num": 337837, - "dataset_size": "4.51 GB", - "data_structure": "Agilex_Cobot_Magic_classify_objects_eight_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- 
episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 20095, + "dataset_size": "752.15 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_pink_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:196" + "train": "0:101" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -25232,8 +26447,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -25245,8 +26483,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -25255,8 +26493,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -25268,8 +26506,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -25278,8 +26516,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -25291,7 +26529,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ 
"left_arm_joint_1_rad", @@ -25300,32 +26538,20 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -25334,26 +26560,14 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -25393,17 +26607,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -25420,10 +26634,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -25440,130 +26654,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 
- ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - 
"left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -25585,9 +26799,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_click_pen": { + "Airbot_MMK2_stack_cup": { "task_categories": [ "robotics" ], @@ -25617,11 +26831,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_click_pen", + "dataset_name": "Airbot_MMK2_stack_cup", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "education", - "level2": "school", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -25629,25 +26843,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "stationery", - "level2": "pen", + "object_name": "cup", + "level1": "kitchen_supplies", + "level2": "cup", "level3": null, "level4": null, "level5": null @@ -25655,43 +26853,38 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the pen with your hand, press the pen switch and then place it on the table." + "pick up the cup by hand and stack it on top of another cup." 
], "sub_tasks": [ { - "subtask": "End", + "subtask": "Place the purple cup on the pink cup with the left gripper", "subtask_index": 0 }, { - "subtask": "Lift the pen with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Grasp the pen with the right gripper", + "subtask": "Grasp the white cup with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the pen on the table with the right gripper", + "subtask": "Place the white cup on the purple cup with the right gripper", "subtask_index": 3 }, { - "subtask": "Abnormal", + "subtask": "Grasp the purple cup with the left gripper", "subtask_index": 4 }, - { - "subtask": "Press the pen switch with the right gripper", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 5 } ], "atomic_actions": [ - "garsp", + "grasp", "pick", - "place", - "pressbutton" + "place" ], "robot_name": [ "Airbot_MMK2" @@ -25725,23 +26918,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 100, - "total_frames": 30984, + "total_episodes": 99, + "total_frames": 67637, "fps": 30, - "total_tasks": 7, - "total_videos": 400, + "total_tasks": 6, + "total_videos": 396, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "1.12 GB" + "dataset_size": "2.37 GB" }, - "frame_num": 30984, - "dataset_size": "1.12 GB", - "data_structure": "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- 
episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 67637, + "dataset_size": "2.37 GB", + "data_structure": "Airbot_MMK2_stack_cup_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:99" + "train": "0:98" }, "features": { "observation.images.cam_head_rgb": { @@ -26095,879 +27288,173 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "leju_robot_hotel_services_ab": { + "path": "leju_robot_hotel_services_ab", + "dataset_name": "hotel_services_ab", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "End", "objects": [ { "object_name": "table", - "level1": "home_storage", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "ambrosial_yogurt", - "level1": "food", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "food", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "food", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - "level1": "food", - "level2": "grape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - 
"object_name": "eggplant", - "level1": "food", - "level2": "eggplant", + "object_name": "card", + "level1": "nfc", + "level2": "card", "level3": null, "level4": null, "level5": null }, { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": null, + "frame_range": "0-81123", + "dataset_size": "5.2GB", + "statistics": { + "total_episodes": 450, + "total_frames": 81123, + "total_tasks": 1, + "total_videos": 1350, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "e7a6fa76-d4ee-4f1a-b951-26d50fb71f0d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Take out the room card with right gripper", + "Hand the room card to the guest with right gripper", + "Hand the room card to the target.", + "Pick up the room card from the card holder.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n 
author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_hotel_services_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_cup_storage": { + "path": "AIRBOT_MMK2_cup_storage", + "dataset_name": "cup_storage", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the blue cup on the plate with the left gripper", + "objects": [ { - "object_name": "eyeglass_case", - "level1": "laboratory_supplies", - "level2": "eyeglass_case", + 
"object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", + "object_name": "cup", + "level1": "container", + "level2": "cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cleanser", - "level1": "daily_necessities", - "level2": "cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_table_cloths", - "level1": "laboratory_supplies", - "level2": "green_table_cloths", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." 
- ], - "sub_tasks": [ - { - "subtask": "The left gripper places milk on the left side of the table", - "subtask_index": 0 - }, - { - "subtask": "The left gripper places bread on the left side of the table", - "subtask_index": 1 - }, - { - "subtask": "Use the right gripper to grab the shampoo on the right side of the table", - "subtask_index": 2 - }, - { - "subtask": "Use the right to grab the bread on the right side of table", - "subtask_index": 3 - }, - { - "subtask": "Pass the bread to the left gripper", - "subtask_index": 4 - }, - { - "subtask": "Use the right gripper to grab the bread on the right side of the table", - "subtask_index": 5 - }, - { - "subtask": "The left gripper places grape on the left side of the table\n", - "subtask_index": 6 - }, - { - "subtask": "End", - "subtask_index": 7 - }, - { - "subtask": "The left gripper places bread on the left side of the table\n", - "subtask_index": 8 - }, - { - "subtask": "Pass the eggplant to the left gripper", - "subtask_index": 9 - }, - { - "subtask": "The left gripper places yogurt on the left side of the table", - "subtask_index": 10 - }, - { - "subtask": "Pass the purple garbage bag to the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Use the right gripper to grab the xbreadon the right side of the table\n", - "subtask_index": 12 - }, - { - "subtask": "The left gripper places yogurt on the left side of the table", - "subtask_index": 13 - }, - { - "subtask": "Pass the Rubik's Cube to the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Use the right gripper to grab the grape on the right side of the table\n", - "subtask_index": 15 - }, - { - "subtask": "Use the right gripper to grab the grape on the right side of the table", - "subtask_index": 16 - }, - { - "subtask": "Pass the yogurt to the left gripper", - "subtask_index": 17 - }, - { - "subtask": "Pass the bread to the left gripper\n", - "subtask_index": 18 - }, - { - "subtask": "Use the right gripper to grab the yogurt on the 
right side of the table\n", - "subtask_index": 19 - }, - { - "subtask": "Pass the grape to the left grippe", - "subtask_index": 20 - }, - { - "subtask": "Use the right gripper to grab the milk on the right side of the table\n", - "subtask_index": 21 - }, - { - "subtask": "The left gripper places shampoo on the left side of the table", - "subtask_index": 22 - }, - { - "subtask": "The left gripper places grape on the left side of the table", - "subtask_index": 23 - }, - { - "subtask": "The left gripper places eggplant on the left side of the table", - "subtask_index": 24 - }, - { - "subtask": "Use the right gripper to grab the milk on the right side of the table", - "subtask_index": 25 - }, - { - "subtask": "Pass the milk to the left gripper", - "subtask_index": 26 - }, - { - "subtask": "The left gripper places banana on the left side of the table", - "subtask_index": 27 - }, - { - "subtask": "The left gripper places milk on the left side of the table", - "subtask_index": 28 - }, - { - "subtask": "The left gripper places xx on the left side of the table", - "subtask_index": 29 - }, - { - "subtask": "Pass the shampoo to the left gripper", - "subtask_index": 30 - }, - { - "subtask": "\nPass the bread to the left gripper\n", - "subtask_index": 31 - }, - { - "subtask": "Use the right gripper to grab the bread on the right side of the table\n", - "subtask_index": 32 - }, - { - "subtask": "The left gripper places purple garbage bag on the left side of the table", - "subtask_index": 33 - }, - { - "subtask": "Use the right gripper to grab the xx on the right side of the table", - "subtask_index": 34 - }, - { - "subtask": "Pass the milk to the left gripper\n", - "subtask_index": 35 - }, - { - "subtask": "Pass the banana to the left gripper", - "subtask_index": 36 - }, - { - "subtask": "Pass the shampoo to the left gripper\n", - "subtask_index": 37 - }, - { - "subtask": "The left gripper places Rubik's Cube on the left side of the table", - "subtask_index": 38 - }, - { - 
"subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", - "subtask_index": 39 - }, - { - "subtask": "\nPass the yogurt to the left gripper", - "subtask_index": 40 - }, - { - "subtask": "\nPass the bread to the left gripper", - "subtask_index": 41 - }, - { - "subtask": "Use the right gripper to shampoo the on the right side of the table", - "subtask_index": 42 - }, - { - "subtask": "User the right gripper to grab the grape on the right side of the table\n", - "subtask_index": 43 - }, - { - "subtask": "The left gripper places milk on the left side of the table\n", - "subtask_index": 44 - }, - { - "subtask": "Use the right gripper to grab the yogurt on the right side of the table", - "subtask_index": 45 - }, - { - "subtask": "The left gripper places yogurt on the left side of the table\n", - "subtask_index": 46 - }, - { - "subtask": "Use the right gripper to grab the shampoo on the right side of the table", - "subtask_index": 47 - }, - { - "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", - "subtask_index": 48 - }, - { - "subtask": "\nPass the milk to the left gripper\n", - "subtask_index": 49 - }, - { - "subtask": "Pass the grape to the left gripper", - "subtask_index": 50 - }, - { - "subtask": "Pass the grape to the left gripper\n", - "subtask_index": 51 - }, - { - "subtask": "null", - "subtask_index": 52 - } - ], - "atomic_actions": [ - "grasp", - "lift", - "lower", - "handover", - "takeover" - ], - "robot_name": [ - "agilex_cobot_magic" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, 
codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 98, - "total_frames": 69065, - "fps": 30, - "total_tasks": 53, - "total_videos": 294, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "884.48 MB" - }, - "frame_num": 69065, - "dataset_size": "884.48 MB", - "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:97" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - 
"right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 5 - ] - }, - "scene_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 1 - ] - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - 
"left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - } - }, - "authors": { - 
"contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "alpha_bot_2_sticker": { - "path": "alpha_bot_2_sticker", - "dataset_name": "sticker", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "poster", - "level1": "toy", - "level2": "poster", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-97993", - "dataset_size": "1.9GB", + "operation_platform_height": 77.2, + "frame_range": "0-12246", + "dataset_size": "522.9MB", "statistics": { - "total_episodes": 93, - "total_frames": 97993, + "total_episodes": 48, + "total_frames": 12246, "total_tasks": 1, - "total_videos": 372, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "81aee7b0-84c3-4b85-9778-c5c9df188afd", + "dataset_uuid": "4f813d8b-217d-4c36-9698-e2c22c0696ae", "language": [ "en", "zh" @@ -26976,8 +27463,12 @@ "robotics" ], "sub_tasks": [ + "Place the blue cup on the plate with the left gripper", + "Place the purple cup on the blue cup with the right gripper", + "Abnormal", "End", - "Stick the advertisement in the top-right corner area with both grippers", + "Grasp the purple cup with the right gripper", + "Grasp the blue cup with the left gripper", "null" ], "annotations": { @@ -27015,12 +27506,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An 
Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "alpha_bot_2_sticker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_sticker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": 
"AIRBOT_MMK2_cup_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_cup_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ 
├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_stack_baskets": { - "path": "R1_Lite_stack_baskets", - "dataset_name": "stack_baskets", + "Cobot_Magic_twist_bottle_cap": { + "path": "Cobot_Magic_twist_bottle_cap", + "dataset_name": "twist_bottle_cap", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -27028,10 +27519,11 @@ "scene_type": [], "atomic_actions": [ "grasp", + "pick", "place", - "pick" + "twist" ], - "tasks": "Grasp the dark basket with the right gripper", + "tasks": "Place the Yili Changqing with the left hand", "objects": [ { "object_name": "table", @@ -27042,127 +27534,67 @@ "level5": null }, { - "object_name": "basket", + "object_name": "bottle", "level1": "container", - "level2": "basket", + "level2": "bottle", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-57189", - "dataset_size": "4.1GB", - "statistics": { - "total_episodes": 101, - "total_frames": 57189, - "total_tasks": 1, - "total_videos": 404, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "4b7a3d8c-18c0-45af-9337-02adad3a72de", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the dark basket with the right gripper", - "End", - "Place the light basket on the middle of the table with the 
left gripper", - "Grasp the light basket with the left gripper", - "Place the dark basket into the light color basket with the right gripper", - "Static", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin 
Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_stack_baskets_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_stack_baskets_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Galbot_g1_fold_clothe_b": { - "path": "Galbot_g1_fold_clothe_b", - "dataset_name": "fold_clothe_b", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "flod" - ], - "tasks": "Abnormal", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "master_kong_oolong_tea", + "level1": "drink", + "level2": "master_kong_oolong_tea", "level3": null, "level4": null, "level5": null }, { - "object_name": "clothes", - "level1": "fabric", - "level2": "clothes", + "object_name": "green_tea", + "level1": "drink", + "level2": "green_tea", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "meinianda", + "level1": "drink", + "level2": "meinianda", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yili_changqing", + "level1": "drink", + "level2": "yili_changqing", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cestbon", + "level1": "drink", + "level2": "cestbon", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-197328", - "dataset_size": "8.8GB", + "operation_platform_height": 77.2, + "frame_range": "0-749388", + "dataset_size": "8.7GB", "statistics": { - "total_episodes": 213, - "total_frames": 197328, - "total_tasks": 1, - "total_videos": 639, + "total_episodes": 863, + "total_frames": 749388, + "total_tasks": 9, + "total_videos": 2589, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "0dd63c25-960f-4489-88af-e3c5c38998c2", + "dataset_uuid": "c084dcfc-e7a8-4e35-a194-0325296dcd6e", "language": [ "en", "zh" @@ -27171,17 +27603,55 @@ "robotics" ], "sub_tasks": [ - "Abnormal", - "use the left gripper to clamp the left edge of the fabric", - "Drag the clothes to the center of the table", + "Place the Yili Changqing with the left hand", + "Lift the Peach Oolong Tea with the left hand", + "Place the bottle cap on the table with the right hand", + "Move the Peach Oolong Tea from right to left", + "Move from left to right", + "Move the Peach Oolong Tea from left to right", + "Secure the C'estbon with the left hand", + "Move the Mirinda to the right with the left hand", + "Place the bottle on the table with the left gripper", + "Grasp the bottle and move it to the center of the table with the left gripper", + "Move the Mirinda from left to right", + "Move the Mirinda from top to bottom", + "Discard", + "Place the bottle cap on the table", "End", - "use both grippers simultaneously to clamp the upper edge of the clothing fabric", - "use both grippers to drag the lower edge of the fabric forward and fold it over the upper edge", - "use both grippers to drag the upper edge of the fabric backward and fold it over the lower edge", - "use both grippers simultaneously to clamp the lower edge of the clothing fabric", - "Drag the 
clothes downward with both gripper", - "use the left gripper to drag the left edge of the fabric to the left and folds it over the right edge", - "Flip the folded clothes over with left gripper", + "Place the C'estbon with the left hand", + "Move the Mirinda from right to left", + "Move the green tea to the right with the left hand", + "Lift the bottle and move it to the center of the table with the left gripper", + "Grab the green tea with the left hand", + "Secure the Peach Oolong Tea with the left hand", + "Place the Mirinda with the left hand", + "Fix the Yili Changqing on the left", + "Secure the green tea with the left hand", + "Secure the Yili Changqing with the left hand", + "Move the Yili Changqing from left to right", + "Abnormal", + "Place the bottle cap on the right", + "Place the green tea with the left hand", + "Move the Yili Changqing to the right with the left hand", + "Lift the green tea with the left hand", + "Grab the C'estbon with the left hand", + "Move from right to left", + "Unscrew the bottle cap with the right gripper while holding the bottle with the left gripper", + "Secure the Mirinda with the left hand", + "Grab the Yili Changqing with the left hand", + "Lift the Yili Changqing with the left hand", + "Move the C'estbon to the right with the left hand", + "Place the Peach Oolong Tea with the left hand", + "Move the Yili Changqing from right to left", + "Fix the Mirinda on the left", + "Move the Yili Changqing from bottom to top", + "Grab the Peach Oolong Tea with the left hand", + "Move the Peach Oolong Tea to the right with the left hand", + "Move the Yili Changqing from top to bottom", + "Grab the Mirinda with the left hand", + "Twist the bottle cap with the right hand", + "Lift the Mirinda with the left hand", + "Place the bottle cap on the table with the right gripper", "null" ], "annotations": { @@ -27219,10 +27689,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for 
Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Galbot_g1_fold_clothe_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_fold_clothe_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_twist_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ 
└── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_twist_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_erase_board_left": { + "Agilex_Cobot_Magic_storage_orange_white_bag": { "task_categories": [ "robotics" 
], @@ -27252,11 +27722,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_erase_board_left", + "dataset_name": "Agilex_Cobot_Magic_storage_orange_white_bag", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "education", - "level2": "school", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -27272,17 +27742,17 @@ "level5": null }, { - "object_name": "whiteboard", - "level1": "stationery", - "level2": "whiteboard", + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", + "object_name": "white_canvas_bags", + "level1": "daily_necessities", + "level2": "white_canvas_bags", "level3": null, "level4": null, "level5": null @@ -27290,39 +27760,46 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use the left claw to pick up the whiteboard eraser, wipe the notes on the whiteboard clean, and then put them back in place." + "open the white linen bag and put the orange in the bag." 
], "sub_tasks": [ { - "subtask": "Place the eraser with the left gripper", + "subtask": "Grasp the orange with right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the eraser with the left gripper", + "subtask": "Grasp the orange with left gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Lift the handbag with left gripper", "subtask_index": 3 }, { - "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", + "subtask": "Place the orange in the handbag with right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the orange in the basket with right gripper", "subtask_index": 5 + }, + { + "subtask": "Put down the handbag with left gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ "grasp", "lift", - "wipe", - "handover" + "lower" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -27354,23 +27831,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 23847, + "total_episodes": 100, + "total_frames": 43104, "fps": 30, - "total_tasks": 6, - "total_videos": 147, + "total_tasks": 8, + "total_videos": 300, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "217.68 MB" + "dataset_size": "406.02 MB" }, - "frame_num": 23847, - "dataset_size": "217.68 MB", - "data_structure": "Agilex_Cobot_Magic_erase_board_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- 
episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 43104, + "dataset_size": "406.02 MB", + "data_structure": "Agilex_Cobot_Magic_storage_orange_white_bag_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:48" + "train": "0:99" }, "features": { "observation.images.cam_head_rgb": { @@ -27741,1442 +28218,590 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "R1_Lite_connect_the_router_cable": { - "path": "R1_Lite_connect_the_router_cable", - "dataset_name": "connect_the_router_cable", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Agilex_Cobot_Magic_classify_objects_eight": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "insert" - ], - "tasks": "Hand it to the other hand", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "router", - "level1": "electric_appliance", - "level2": "router", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "socket", - "level1": "electric_appliance", - "level2": "socket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cable", - "level1": "electric_appliance", - "level2": "cable", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plug", - "level1": "electric_appliance", - "level2": "plug", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-165617", - "dataset_size": "9.4GB", - "statistics": { - "total_episodes": 105, - "total_frames": 165617, - "total_tasks": 1, - "total_videos": 315, - "total_chunks": 1, - 
"chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "2802db0f-0c22-4fba-b305-aae14b28e492", "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hand it to the other hand", - "Pick up plug", - "Pick up the cable", - "Close the lower drawer", - "Place the iced tea in the fridge", - "Open the upper fridge door", - "Plug it into the socket", - "End", - "Place zucchini in the lower drawer", - "Open the lower drawer", - "Deliver it to the other hand", - "Abnormal", - "Plug plug into socket", - "Open the plastic bag", - "Place pear in the lower drawer", - "Place the Wahaha in the fridge", - "Pick up cable", - "Close the middle refrigerator door", - "Open the middle refrigerator door", - "Place king oyster Mushroom in the lower drawer", - "Plug cable into router interface", - "Align cable and router interface", - "Close the upper refrigerator door", - "Align plug and socket", - "Plug it into the receiver", - "Pick up the plug", - "null" + "en" ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei 
Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_connect_the_router_cable_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_connect_the_router_cable_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_pour_drink": { - "path": "Cobot_Magic_pour_drink", - "dataset_name": "pour_drink", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "pour" + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } ], - "tasks": "Grasp the black cup with left gripper", + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated 
paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_classify_objects_eight", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "NFC_orange_juice", - "level1": "drink", - "level2": "NFC_orange_juice", + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "red_wine", - "level1": "drink", - "level2": "red_wine", + "object_name": "black_basket", + "level1": "food", + "level2": "black_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "sprite", - "level1": "drink", - "level2": "sprite", + "object_name": "bread", + "level1": "food", + "level2": "bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "black_mug", - "level1": "container", - "level2": "black_mug", + "object_name": "orange", + "level1": "food", + "level2": "orange", "level3": null, "level4": null, "level5": null }, { - "object_name": "paper_cup", - "level1": "container", - "level2": "paper_cup", + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", "level3": 
null, "level4": null, "level5": null }, { - "object_name": "transparent_cup", - "level1": "container", - "level2": "transparent_cup", + "object_name": "pink_clear_plastic_cup", + "level1": "kitchen_supplies", + "level2": "pink_clear_plastic_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "laundry_detergent", + "level1": "daily_necessities", + "level2": "laundry_detergent", "level3": null, "level4": null, "level5": null }, { - "object_name": "cestbon", - "level1": "drink", - "level2": "cestbon", + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "Mmentholatum_facial_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "coffee", - "level1": "drink", - "level2": "coffee", + "object_name": "lemon", + "level1": "food", + "level2": "lemon", "level3": null, "level4": null, "level5": null }, { - "object_name": "cola", - "level1": "drink", - "level2": "cola", + "object_name": "apple", + "level1": "food", + "level2": "apple", "level3": null, "level4": null, "level5": null }, { - "object_name": "milk", - "level1": "drink", - "level2": "milk", + "object_name": "mango", + "level1": "food", + "level2": "mango", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup", - "level1": "container", - "level2": "cup", + "object_name": "kiwi_fruit", + "level1": "food", + "level2": "kiwi_fruit", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-862292", - "dataset_size": "12.6GB", - "statistics": { - "total_episodes": 1613, - "total_frames": 862292, - "total_tasks": 61, - "total_videos": 4839, - "total_chunks": 2, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "9b6b4d09-0c15-4d1f-b8f9-12be94c871cd", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the black cup with left 
gripper", - "Grasp the white cup with right gripper", - "Static", - "Place the orange juice bottle on the table with left gripper", - "Lift the black cup with left gripper", - "Place the water bottle on the table with right gripper", - "Place the grey cup on the table with left gripper", - "End", - "Place the sprite bottle on the table with right gripper", - "Lift the red cup with left gripper", - "Place the cola bottle on the table with left gripper", - "Grasp the bottle with cola with right gripper", - "Grasp the bottle with sprite with left gripper", - "Pour the water from bottle to cup with right gripper", - "Place the coffee bottle on the table with right gripper", - "Grasp the bottle with orange juice with left gripper", - "Pour the red wine from bottle to cup with left gripper", - "Place the black cup in the center of view with right gripper", - "Place the transparent cup on the table with right gripper", - "Place the white cup on the table with right gripper", - "Grasp the bottle filled water with right gripper", - "Grasp the bottle with coffee with right gripper", - "Place the yellow paper cup on the table with left gripper", - "Pour the yuexian Milk from bottle to cup with right gripper", - "Grasp the bottle with red wine with left gripper", - "Grasp the black cup with right gripper", - "Place the white cup on the table with left gripper", - "Grasp the yellow paper cup with right gripper", - "Grasp the yellow paper cup with left gripper", - "Pour the orange juice from bottle to cup with left gripper", - "Grasp the bottle with water with right gripper", - "Pour the yogurt from bottle to cup with left gripper", - "Grasp the bottle with yuexian Milk with right gripper", - "Place the yellow paper cup on the table with right gripper", - "Pour the cola from bottle to cup with right gripper", - "Abnormal", - "Grasp the white cup with left gripper", - "Pour the sprite from bottle to cup with left gripper", - "Lift the grey cup with left gripper", - "Place the 
black cup on the table with left gripper", - "Grasp the red cup with left gripper", - "Place the red cup on the table with left gripper", - "Lift the yellow paper cup with right gripper", - "Grasp the grey cup with left gripper", - "Place the yuexian Milk bottle on the table with right gripper", - "Lift the yellow paper cup with left gripper", - "Place the sprite bottle on the table with left gripper", - "Lift the white cup with left gripper", - "Place the cola bottle on the table with right gripper", - "Pour the sprite from bottle to cup with right gripper", - "Place the red wine bottle on the table with left gripper", - "Lift the white cup with right gripper", - "Grasp the transparent cup with right gripper", - "Grasp the bottle with sprite with right gripper", - "Pour the coffee from bottle to cup with right gripper", - "Lift the grey cup with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong 
Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_pour_drink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ 
│ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", - "structure": "Cobot_Magic_pour_drink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── 
observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" - }, - "R1_Lite_wash_the_tableware": { - "path": "R1_Lite_wash_the_tableware", - "dataset_name": "wash_the_tableware", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp yellow bowl with right gripper", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", "level3": null, "level4": null, "level5": null }, { - "object_name": "sink", - "level1": "container", - "level2": "sink", + "object_name": "pear", + "level1": "food", + "level2": "pear", "level3": null, "level4": null, "level5": null }, { - "object_name": "chopsticks", - "level1": "tableware", - "level2": "chopsticks", + "object_name": "avocado", + "level1": "food", + "level2": "avocado", "level3": null, "level4": null, "level5": null }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + 
"object_name": "brown_clear_plastic_cup", + "level1": "kitchen_supplies", + "level2": "brown_clear_plastic_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", + "object_name": "shampoo", + "level1": "daily_necessities", + "level2": "shampoo", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "deli Water-based_Marker", + "level1": "stationery", + "level2": "deli_Water-based_Marker", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-197672", - "dataset_size": "7.3GB", - "statistics": { - "total_episodes": 137, - "total_frames": 197672, - "total_tasks": 1, - "total_videos": 411, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "62b4f49b-4413-4d29-9822-c440835cdf41", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "Place multiple objects separately in different baskets." 
], "sub_tasks": [ - "Grasp yellow bowl with right gripper", - "Pick up the bowl", - "Open the dispenser valve with right gripper", - "End", - "Rinse the chopsticks", - "Put the chopsticks into the sink", - "Place the chopsticks on the table", - "Put a spoon back to the table from the sink", - "Put a bowl back to the table from the sink", - "Place pink bowl into the sink with right gripper", - "Rinse the plate", - "Wait for the water to rinse the bowl", - "Place a chopstick into the sink with right gripper", - "Put a bowl into the sink", - "Rinse the bowl", - "Place a chopstick on the bowl with right gripper", - "Place the plate on the table", - "Pour the water from the yellow bowl into the sink with right gripper", - "Place the bowl on the table", - "Pick up the chopsticks", - "Put the chopsticks back to the table from the sink", - "Put a spoon into the sink", - "Pour the water from the pink bowl into the sink with right gripper", - "Place the chopsticks on the bowl", - "Abnormal", - "Pick up the spoon", - "Rinse the spoon", - "Place yellow bowl on the table with right gripper", - "Place yellow bowl into the sink with right gripper", - "Place the spoon on the plate", - "Put a plate back to the table from the sink", - "Turn off the faucet", - "Pick up the plate", - "Place the spoon on the bowl", - "Close the dispenser valve with right gripper", - "Grasp a chopstick with right gripper", - "Grasp pink bowl with right gripper", - "Turn on the faucet", - "Place pink bowl on the table with right gripper", - "Put a plate into the sink", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN 
Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_wash_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_wash_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_store_peaches_and_pears": { - "path": "AIRBOT_MMK2_store_peaches_and_pears", - "dataset_name": "store_peaches_and_pears", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the pear into the right compartment of the storage box with the right gripper", - "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the pear in the light basket with right gripper", + "subtask_index": 0 }, { - "object_name": "peache", - "level1": "fruit", - "level2": "peache", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the shampoo with left gripper", + "subtask_index": 1 }, { - "object_name": "pear", - "level1": "fruit", - "level2": "pear", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the pink marker pen in the dark basket with right gripper", + "subtask_index": 2 }, { - "object_name": "compartment", - "level1": "container", - "level2": "compartment", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-11276", - "dataset_size": "343.0MB", - "statistics": { - "total_episodes": 48, - "total_frames": 11276, - "total_tasks": 1, - "total_videos": 192, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "c95d9ed0-71ed-4c90-94ec-30af0eb88215", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the pear into the right compartment of the storage box with the right gripper", - "Place the peach into the left compartment of the storage box with the left gripper", - "Abnormal", - "Grasp a peach with the left gripper", - "End", - "Grasp a pear with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": 
"auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n 
url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_store_peaches_and_pears_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_store_peaches_and_pears_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_pullBowl_storage_bread_b": { - "path": "G1edu-u3_pullBowl_storage_bread_b", - "dataset_name": "pullBowl__storage2bread_unordered", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "walk" - ], - "tasks": "Move the pink bowl to the center of table with right hand", - "objects": [ + "subtask": "Pick up the pear with left gripper", + "subtask_index": 3 + }, { - "object_name": "chair", - "level1": "furniture", - "level2": "chair", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the lime in the light basket with right gripper", + "subtask_index": 4 }, { - "object_name": "bear_doll", - "level1": "toy", - "level2": "bear_doll", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-162960", - "dataset_size": "3.3GB", - "statistics": { - "total_episodes": 186, - "total_frames": 162960, - "total_tasks": 1, - "total_videos": 558, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a48", - "language": [ - "en", - "zh" - ], - "task_categories": [ - 
"robotics" - ], - "sub_tasks": [ - "Move the pink bowl to the center of table with right hand", - "Static", - "Grasp the round bread with left hand", - "End", - "Move towards the toy bear on the chair", - "Grasp the long bread with left hand", - "Place the round bread in pink bowl with left hand", - "Abnormal", - "Place the long bread in pink bowl with left hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, 
Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pullBowl_storage_bread_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pullBowl_storage_bread_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ 
├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_the_cup_is_put_into_the_bucket": { - "path": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket", - "dataset_name": "the_cup_is_put_into_the_bucket", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "open", - "hold" - ], - "tasks": "Lift the cup the right gripper", - "objects": [ + "subtask": "Place the laundry detergent in the dark basket with right gripper", + "subtask_index": 5 + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the mango in the light basket with right gripper", + "subtask_index": 6 }, { - "object_name": "paper_cup", - "level1": "container", - "level2": "paper_cup", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the light brown cup with left gripper", + "subtask_index": 7 }, { - "object_name": "bucket", - "level1": "container", - "level2": "bucket", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-9335", - "dataset_size": "369.4MB", 
- "statistics": { - "total_episodes": 50, - "total_frames": 9335, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "3d103654-86bc-4dae-b569-be335fe5d8df", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Lift the cup the right gripper", - "Static", - "End", - "Grasp the cup the right gripper", - "Lift the cup the left gripper", - "Grasp the cup the left gripper", - "place the cup in the yellow basket use the right gripper", - "place the cup in the yellow basket use the left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng 
Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_box_storage_parcel_d": { - "path": "leju_robot_box_storage_parcel_d", - "dataset_name": "box_storage_parcel_d", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pass the laundry detergent to the left gripper", - "objects": [ + "subtask": "Place the orange in the light basket with right gripper", + "subtask_index": 8 + }, { - "object_name": "table", - "level1": 
"furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the dark brown cup with left gripper", + "subtask_index": 9 }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the dark brown cup in the dark basket with left gripper", + "subtask_index": 10 }, { - "object_name": "parcel", - "level1": "container", - "level2": "parcel", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the light brown cup with right gripper", + "subtask_index": 11 }, { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-288956", - "dataset_size": "18.0GB", - "statistics": { - "total_episodes": 212, - "total_frames": 288956, - "total_tasks": 1, - "total_videos": 636, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "a24f0c02-18b7-4770-8ed1-41d5db2ba6d8", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pass the laundry detergent to the left gripper", - "Pick up the fast-moving consumer goods bottle from the table.", - "Take the fast-moving consumer goods bottle from the table.", - "Abnormal", - "Grasp the laundry detergent with right gripper", - "Place the laundry detergent on the yellow rubber mat with left gripper", - "Flip the fast-moving consumer goods bottle to the front side.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_move_the_cup": { - "path": "Cobot_Magic_move_the_cup", - "dataset_name": "move_the_cup", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the transparent glass cup", - "objects": [ + "subtask": "Pick up pink marker pen with right gripper", + "subtask_index": 12 + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the lime with left gripper", + "subtask_index": 13 }, { - "object_name": "glass_cup", - "level1": "container", - "level2": "glass_cup", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up kiwi with left gripper", + "subtask_index": 14 }, { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-28338", - "dataset_size": "1.1GB", - "statistics": { - "total_episodes": 100, - "total_frames": 28338, - "total_tasks": 1, - "total_videos": 300, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "503e7d0b-c281-4ea6-98ff-ebe74764c6ad", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the transparent glass cup", - "Place the glass cup on the blue zone", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - 
"url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_move_the_cup_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_move_the_cup_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_storage_of_toiletries": { - "path": "R1_Lite_storage_of_toiletries", - "dataset_name": "storage_of_toiletries", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the toothpaste in the storage bag", - "objects": [ + "subtask": "Pick up mango with left gripper", + "subtask_index": 15 + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the shampoo in the dark basket with right gripper", + "subtask_index": 16 }, { - "object_name": "storage_bag", - "level1": "container", - "level2": "storage_bag", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the dark brown cup in the dark basket with right gripper", + "subtask_index": 17 }, { - "object_name": "toothpaste", - "level1": "personal_care_products", - "level2": "toothpaste", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the pomegranate with right gripper", + "subtask_index": 18 }, { - "object_name": "toothbrush", - "level1": "personal_care_products", - "level2": "toothbrush", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the pear with right gripper", + "subtask_index": 19 }, { - "object_name": "comb", - "level1": "personal_care_products", - "level2": "comb", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-138432", - "dataset_size": "7.0GB", - "statistics": { - "total_episodes": 101, - "total_frames": 138432, - "total_tasks": 1, - "total_videos": 303, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "25f2cd96-c284-44a9-b59c-98e204e16347", - "language": [ - "en", - "zh" - ], - 
"task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the toothpaste in the storage bag", - "Pick up the comb", - "Place the comb on the table", - "Place the comb into the storage bag", - "Pick up the storage bag", - "Pick up a toothbrush", - "Place the toothpaste on the table", - "abnormal", - "Pick up the toothpaste", - "Open the storage bag", - "Close the strorage bag and place it on the desk", - "Place the toothbrush in the storage bag", - "Hand it to the other hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan 
Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_storage_of_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_storage_of_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ 
├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_storage_item_d": { - "path": "AgiBot-g1_storage_item_d", - "dataset_name": "storage_item_d", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Grab and pick up both the mouse and power cord from the accessory packaging area at the same time", - "objects": [ + "subtask": "Pick up the dark brown cup with right gripper", + "subtask_index": 20 + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the red cup with right gripper", + "subtask_index": 21 }, { - "object_name": "data_cable", - "level1": "tool", - "level2": "data_cable", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the pomegranate in the light basket with left gripper", + "subtask_index": 22 }, { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - 
"level5": null + "subtask": "Pick up the avocado with right gripper", + "subtask_index": 23 }, { - "object_name": "mouse", - "level1": "tool", - "level2": "mouse", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-180148", - "dataset_size": "86.0GB", - "statistics": { - "total_episodes": 450, - "total_frames": 180148, - "total_tasks": 1, - "total_videos": 3600, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "49f10ba2-bad7-46fc-ae4b-f6142efa81a2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grab and pick up both the mouse and power cord from the accessory packaging area at the same time", - "Place the mouse and power cord into the box", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei 
Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_storage_item_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ 
├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_storage_item_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Agilex_Cobot_Magic_erase_board_passing_right_to_left": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ + "subtask": "Pick up the hard facial cleanser with left gripper", + "subtask_index": 24 + }, { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "subtask": "Place the Incense box in the dark basket with left gripper", + "subtask_index": 25 }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_erase_board_passing_right_to_left", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "education", - "level2": "school", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the shampoo with right gripper", + "subtask_index": 26 }, { - "object_name": "whiteboard", - "level1": "stationery", - "level2": "whiteboard", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the banana in the light basket with right gripper", + "subtask_index": 27 }, { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the right gripper to place the eraser in the left, use the left gripper to pick up the eraser, wipe the notes on the whiteboard clean, and then put it down." 
- ], - "sub_tasks": [ + "subtask": "Place the laundry detergent in the dark basket with left gripper", + "subtask_index": 28 + }, { - "subtask": "Place the eraser with the left gripper", - "subtask_index": 0 + "subtask": "Pick up the orange with right gripper", + "subtask_index": 29 }, { - "subtask": "Grasp the eraser with the left gripper", - "subtask_index": 1 + "subtask": "Place the mangosteen in the light basket with right gripper", + "subtask_index": 30 }, { - "subtask": "Abnormal", - "subtask_index": 2 + "subtask": "Place the hard facial cleanser in the dark basket with right gripper", + "subtask_index": 31 + }, + { + "subtask": "Pick up Incense box with right gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the red marker in the dark basket with right gripper", + "subtask_index": 33 + }, + { + "subtask": "Pick up the lemon with left gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the banana in the light basket with left gripper", + "subtask_index": 35 + }, + { + "subtask": "Pick up Incense box with left gripper", + "subtask_index": 36 + }, + { + "subtask": "Pick up kiwi with right gripper", + "subtask_index": 37 + }, + { + "subtask": "Pick up the mango with right gripper", + "subtask_index": 38 + }, + { + "subtask": "Place the lemon in the light basket with left gripper", + "subtask_index": 39 + }, + { + "subtask": "Pick up the laundry detergent with left gripper", + "subtask_index": 40 + }, + { + "subtask": "Pick up toothpaste with right gripper", + "subtask_index": 41 + }, + { + "subtask": "Pick up the bread with right gripper", + "subtask_index": 42 + }, + { + "subtask": "Pick up toothpaste with left gripper", + "subtask_index": 43 + }, + { + "subtask": "Place the red cup in the dark basket with left gripper", + "subtask_index": 44 + }, + { + "subtask": "Place the red cup in the dark basket with right gripper", + "subtask_index": 45 + }, + { + "subtask": "Place the lemon in the light basket with right gripper", + "subtask_index": 
46 + }, + { + "subtask": "Pick up banana with left gripper", + "subtask_index": 47 + }, + { + "subtask": "Place the avocado in the light basket with right gripper", + "subtask_index": 48 + }, + { + "subtask": "Pick up egg yolk pastry with left gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the light brown cup in the dark basket with left gripper", + "subtask_index": 50 + }, + { + "subtask": "Pick up egg yolk pastry with right gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the orange in the light basket with left gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the hard facial cleanser in the dark basket with left gripper", + "subtask_index": 53 + }, + { + "subtask": "Pick up the lemon with right gripper", + "subtask_index": 54 + }, + { + "subtask": "Place the kiwi in the light basket with right gripper", + "subtask_index": 55 }, { "subtask": "End", - "subtask_index": 3 + "subtask_index": 56 }, { - "subtask": "Move the eraser to the left side of the blackboard with the right gripper", - "subtask_index": 4 + "subtask": "Pick up gray cup with left gripper", + "subtask_index": 57 }, { - "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", - "subtask_index": 5 + "subtask": "Place the pomegranate in the light basket with right gripper", + "subtask_index": 58 + }, + { + "subtask": "Pick up banana with right gripper", + "subtask_index": 59 + }, + { + "subtask": "Place the blackboard eraser in the dark basket with left gripper", + "subtask_index": 60 + }, + { + "subtask": "Pick up the red marker with right gripper", + "subtask_index": 61 + }, + { + "subtask": "Place the red marker in the dark basket with left gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the gray cup in the dark basket with left gripper", + "subtask_index": 63 + }, + { + "subtask": "Pick up the red cup with left gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the kiwi in the light basket with left gripper", + 
"subtask_index": 65 + }, + { + "subtask": "Pick up the mangosteen with left gripper", + "subtask_index": 66 + }, + { + "subtask": "Pick up the orange with left gripper", + "subtask_index": 67 + }, + { + "subtask": "Place the mangosteen in the light basket with left gripper", + "subtask_index": 68 + }, + { + "subtask": "Place the bread in the light basket with right gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the toothpaste in the dark basket with right gripper", + "subtask_index": 70 + }, + { + "subtask": "Pick up the lime with right gripper", + "subtask_index": 71 + }, + { + "subtask": "Place the mango in the light basket with left gripper", + "subtask_index": 72 + }, + { + "subtask": "Pick up the mangosteen with right gripper", + "subtask_index": 73 + }, + { + "subtask": "Place the pink marker pen in the dark basket with left gripper", + "subtask_index": 74 + }, + { + "subtask": "Pick up the red marker with left gripper", + "subtask_index": 75 + }, + { + "subtask": "Place the pear in the light basket with left gripper", + "subtask_index": 76 + }, + { + "subtask": "Pick up blackboard eraser with right gripper", + "subtask_index": 77 + }, + { + "subtask": "Place the blackboard eraser in the dark basket with right gripper", + "subtask_index": 78 + }, + { + "subtask": "Place the shampoo in the dark basket with left gripper", + "subtask_index": 79 + }, + { + "subtask": "Place the egg yolk pastry in the light basket with left gripper", + "subtask_index": 80 + }, + { + "subtask": "Pick up the pomegranate with left gripper", + "subtask_index": 81 + }, + { + "subtask": "Place the toothpaste in the dark basket with left gripper", + "subtask_index": 82 + }, + { + "subtask": "Pick up blackboard eraser with left gripper", + "subtask_index": 83 + }, + { + "subtask": "Place the Incense box in the dark basket with right gripper", + "subtask_index": 84 + }, + { + "subtask": "Pick up the hard facial cleanser with right gripper", + "subtask_index": 85 + }, + { + 
"subtask": "Place the apple in the light basket with right gripper", + "subtask_index": 86 + }, + { + "subtask": "Place the avocado in the light basket with left gripper", + "subtask_index": 87 + }, + { + "subtask": "Pick up pink marker pen with left gripper", + "subtask_index": 88 + }, + { + "subtask": "Pick up the avocado with left gripper", + "subtask_index": 89 + }, + { + "subtask": "Pick up the laundry detergent with right gripper", + "subtask_index": 90 + }, + { + "subtask": "Place the egg yolk pastry in the light basket with right gripper", + "subtask_index": 91 + }, + { + "subtask": "Pick up the apple with right gripper", + "subtask_index": 92 + }, + { + "subtask": "Place the lime in the light basket with left gripper", + "subtask_index": 93 }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 94 } ], "atomic_actions": [ "grasp", "lift", - "wipe", - "handover" + "lower" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -29208,23 +28833,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 38184, + "total_episodes": 197, + "total_frames": 337837, "fps": 30, - "total_tasks": 7, - "total_videos": 141, + "total_tasks": 95, + "total_videos": 591, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "334.71 MB" + "dataset_size": "4.51 GB" }, - "frame_num": 38184, - "dataset_size": "334.71 MB", - "data_structure": "Agilex_Cobot_Magic_erase_board_passing_right_to_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- 
episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 337837, + "dataset_size": "4.51 GB", + "data_structure": "Agilex_Cobot_Magic_classify_objects_eight_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:46" + "train": "0:196" }, "features": { "observation.images.cam_head_rgb": { @@ -29595,7 +29220,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_classify_object_five": { + "Airbot_MMK2_click_pen": { "task_categories": [ "robotics" ], @@ -29625,11 +29250,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_classify_object_five", + "dataset_name": "Airbot_MMK2_click_pen", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "education", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -29637,49 +29262,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "brown_basket", - "level1": "baskets", - "level2": "brown_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_basket", - "level1": "baskets", - "level2": "yellow_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_fruits", - "level1": "fruits", - "level2": "any_fruits", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_vegetables", - "level1": "vegetables", - "level2": "any_vegetables", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_snacks", - "level1": "snacks", - "level2": "any_snacks", + 
"object_name": "block", + "level1": "toy", + "level2": "block", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_bread", - "level1": "bread", - "level2": "any_bread", + "object_name": "pen", + "level1": "stationery", + "level2": "pen", "level3": null, "level4": null, "level5": null @@ -29687,263 +29288,60 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." + "pick up the pen with your hand, press the pen switch and then place it on the table." ], "sub_tasks": [ { - "subtask": "Grasp the rubiks cube and put it in the left basket", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Grasp the waffle and put it in the right basket", + "subtask": "Lift the pen with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask": "Grasp the pen with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask": "Place the pen on the table with the right gripper", "subtask_index": 3 }, { - "subtask": "Grasp the apple and put it in the right basket", + "subtask": "Abnormal", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Press the pen switch with the right gripper", "subtask_index": 5 }, - { - "subtask": "Grasp the white eraser and put it in the left basket", - "subtask_index": 6 - }, - { - "subtask": "Grasp the square chewing gum and put it in the right basket", - "subtask_index": 7 - }, - { - "subtask": "Grasp the power strip and put it in the left basket", - "subtask_index": 8 - }, - { - "subtask": "Grasp the cleaning agent and put it in the left basket", - "subtask_index": 9 - }, - { - "subtask": "Grasp the soda water and put it in the right basket", - "subtask_index": 10 - }, 
- { - "subtask": "Grasp the spoon and put it in the left basket", - "subtask_index": 11 - }, - { - "subtask": "Grasp the duck toys and put it in the left basket", - "subtask_index": 12 - }, - { - "subtask": "Grasp the triangle cake and put it in the right basket", - "subtask_index": 13 - }, - { - "subtask": "Grasp the compass and put it in the right basket", - "subtask_index": 14 - }, - { - "subtask": "Grasp the cookie and put it in the right basket", - "subtask_index": 15 - }, - { - "subtask": "Grasp the compass and put it in the left basket", - "subtask_index": 16 - }, - { - "subtask": "Grasp the orange and put it in the right basket", - "subtask_index": 17 - }, - { - "subtask": "Grasp the ballpoint pen and put it in the left basket", - "subtask_index": 18 - }, - { - "subtask": "Grasp the round bread and put it in the right basket", - "subtask_index": 19 - }, - { - "subtask": "Grasp the egg yolk pastry and put it in the right basket", - "subtask_index": 20 - }, - { - "subtask": "Grasp the lemon and put it in the right basket", - "subtask_index": 21 - }, - { - "subtask": "Grasp the soap and put it in the left basket", - "subtask_index": 22 - }, - { - "subtask": "Grasp the washing liquid and put it in the left basket", - "subtask_index": 23 - }, - { - "subtask": "Grasp the hard cleanser and put it in the left basket", - "subtask_index": 24 - }, - { - "subtask": "Grasp the milk and put it in the right basket", - "subtask_index": 25 - }, - { - "subtask": "Grasp the black marker and put it in the left basket", - "subtask_index": 26 - }, - { - "subtask": "Grasp the banana and put it in the right basket", - "subtask_index": 27 - }, - { - "subtask": "Grasp the black glass cup and put it in the left basket", - "subtask_index": 28 - }, - { - "subtask": "Grasp the brush and put it in the left basket", - "subtask_index": 29 - }, - { - "subtask": "Grasp the bath ball and put it in the left basket", - "subtask_index": 30 - }, - { - "subtask": "Grasp the blue towel and put it 
in the left basket", - "subtask_index": 31 - }, - { - "subtask": "Grasp the tea cup and put it in the left basket", - "subtask_index": 32 - }, - { - "subtask": "Grasp the peeler and put it in the left basket", - "subtask_index": 33 - }, - { - "subtask": "Grasp the brown towel and put it in the left basket", - "subtask_index": 34 - }, - { - "subtask": "Grasp the peach and put it in the right basket", - "subtask_index": 35 - }, - { - "subtask": "Abnormal", - "subtask_index": 36 - }, - { - "subtask": "Grasp the chocolate and put it in the right basket", - "subtask_index": 37 - }, - { - "subtask": "Grasp the grey towel and put it in the left basket", - "subtask_index": 38 - }, - { - "subtask": "Grasp the canned cola and put it in the right basket", - "subtask_index": 39 - }, - { - "subtask": "Grasp the tape and put it in the left basket", - "subtask_index": 40 - }, - { - "subtask": "Grasp the bread slice and put it in the right basket", - "subtask_index": 41 - }, - { - "subtask": "Grasp the tin and put it in the right basket", - "subtask_index": 42 - }, - { - "subtask": "Grasp the soap and put it in the right basket", - "subtask_index": 43 - }, - { - "subtask": "Grasp the glasses case and put it in the left basket", - "subtask_index": 44 - }, - { - "subtask": "Grasp the yellow duck and put it in the right basket", - "subtask_index": 45 - }, - { - "subtask": "Grasp the peach doll and put it in the right basket", - "subtask_index": 46 - }, - { - "subtask": "Grasp the blue cup and put it in the left basket", - "subtask_index": 47 - }, - { - "subtask": "Grasp the pen container and put it in the left basket", - "subtask_index": 48 - }, - { - "subtask": "Grasp the red duck and put it in the left basket", - "subtask_index": 49 - }, - { - "subtask": "Grasp the lime and put it in the right basket", - "subtask_index": 50 - }, - { - "subtask": "Grasp the long bread and put it in the right basket", - "subtask_index": 51 - }, - { - "subtask": "Grasp the yogurt and put it in the 
right basket", - "subtask_index": 52 - }, - { - "subtask": "Grasp the potato chips and put it in the right basket", - "subtask_index": 53 - }, - { - "subtask": "Grasp the can and put it in the right basket", - "subtask_index": 54 - }, - { - "subtask": "Grasp the ad milk and put it in the right basket", - "subtask_index": 55 - }, - { - "subtask": "Grasp the blue marker and put it in the left basket", - "subtask_index": 56 - }, { "subtask": "null", - "subtask_index": 57 + "subtask_index": 6 } ], "atomic_actions": [ - "grasp", + "garsp", "pick", - "place" + "place", + "pressbutton" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -29960,30 +29358,30 @@ "subtask_annotations.jsonl" ], 
"statistics": { - "total_episodes": 195, - "total_frames": 160875, + "total_episodes": 100, + "total_frames": 30984, "fps": 30, - "total_tasks": 58, - "total_videos": 780, + "total_tasks": 7, + "total_videos": 400, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "8.91 GB" + "dataset_size": "1.12 GB" }, - "frame_num": 160875, - "dataset_size": "8.91 GB", - "data_structure": "Galaxea_R1_Lite_classify_object_five_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(183 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 30984, + "dataset_size": "1.12 GB", + "data_structure": "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:194" + "train": "0:99" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -29992,8 +29390,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -30002,11 +29400,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -30015,8 +29413,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -30025,11 +29423,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -30038,8 +29436,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -30048,11 +29446,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -30061,8 +29459,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", 
"video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -30074,519 +29472,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - 
"left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": 
"RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "Airbot_MMK2_move_pan": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_pan", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "kitchen", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "pan", - "level1": "cookware", - "level2": "pan", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "push the pan to the edge of the table 
with your left hand and place it on the building block with your right hand." - ], - "sub_tasks": [ - { - "subtask": "Grasp the frying pan with the right gripper", - "subtask_index": 0 - }, - { - "subtask": "Abnormal", - "subtask_index": 1 - }, - { - "subtask": "Place the frying pan on the red cube block with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "Push the frying pan on left to right with the left gripper", - "subtask_index": 3 - }, - { - "subtask": "End", - "subtask_index": 4 - }, - { - "subtask": "null", - "subtask_index": 5 - } - ], - "atomic_actions": [ - "push", - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 48, - "total_frames": 14881, - "fps": 30, - "total_tasks": 6, - "total_videos": 192, - "total_chunks": 1, - "chunks_size": 1000, - 
"state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "764.06 MB" - }, - "frame_num": 14881, - "dataset_size": "764.06 MB", - "data_structure": "Airbot_MMK2_move_pan_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:47" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - 
"observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -30842,7 +29728,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_push_toy_car": { + "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth": { "task_categories": [ "robotics" ], @@ -30872,11 +29758,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_push_toy_car", + "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -30892,9 +29778,137 @@ "level5": null }, { - "object_name": "toy_car", - "level1": "toy", - "level2": "toy_car", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + 
{ + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + 
"level5": null + }, + { + "object_name": "green_table_cloths", + "level1": "laboratory_supplies", + "level2": "green_table_cloths", "level3": null, "level4": null, "level5": null @@ -30902,55 +29916,249 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "push the toy car by hand." + "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "The left gripper places milk on the left side of the table", "subtask_index": 0 }, { - "subtask": "Static", + "subtask": "The left gripper places bread on the left side of the table", "subtask_index": 1 }, { - "subtask": "Push the toy car from left to right with the left gripper", + "subtask": "Use the right gripper to grab the shampoo on the right side of the table", "subtask_index": 2 }, { - "subtask": "Abnormal", + "subtask": "Use the right to grab the bread on the right side of table", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "Pass the bread to the left gripper", "subtask_index": 4 + }, + { + "subtask": "Use the right gripper to grab the bread on the right side of the table", + "subtask_index": 5 + }, + { + "subtask": "The left gripper places grape on the left side of the table\n", + "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "The left gripper places bread on the left side of the table\n", + "subtask_index": 8 + }, + { + "subtask": "Pass the eggplant to the left gripper", + "subtask_index": 9 + }, + { + "subtask": "The left gripper places yogurt on the left side of the table", + "subtask_index": 10 + }, + { + "subtask": "Pass the purple garbage bag to the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Use the right gripper to grab the xbreadon the right side of the table\n", + "subtask_index": 12 + }, + { + "subtask": "The left gripper places yogurt on 
the left side of the table", + "subtask_index": 13 + }, + { + "subtask": "Pass the Rubik's Cube to the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Use the right gripper to grab the grape on the right side of the table\n", + "subtask_index": 15 + }, + { + "subtask": "Use the right gripper to grab the grape on the right side of the table", + "subtask_index": 16 + }, + { + "subtask": "Pass the yogurt to the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Pass the bread to the left gripper\n", + "subtask_index": 18 + }, + { + "subtask": "Use the right gripper to grab the yogurt on the right side of the table\n", + "subtask_index": 19 + }, + { + "subtask": "Pass the grape to the left grippe", + "subtask_index": 20 + }, + { + "subtask": "Use the right gripper to grab the milk on the right side of the table\n", + "subtask_index": 21 + }, + { + "subtask": "The left gripper places shampoo on the left side of the table", + "subtask_index": 22 + }, + { + "subtask": "The left gripper places grape on the left side of the table", + "subtask_index": 23 + }, + { + "subtask": "The left gripper places eggplant on the left side of the table", + "subtask_index": 24 + }, + { + "subtask": "Use the right gripper to grab the milk on the right side of the table", + "subtask_index": 25 + }, + { + "subtask": "Pass the milk to the left gripper", + "subtask_index": 26 + }, + { + "subtask": "The left gripper places banana on the left side of the table", + "subtask_index": 27 + }, + { + "subtask": "The left gripper places milk on the left side of the table", + "subtask_index": 28 + }, + { + "subtask": "The left gripper places xx on the left side of the table", + "subtask_index": 29 + }, + { + "subtask": "Pass the shampoo to the left gripper", + "subtask_index": 30 + }, + { + "subtask": "\nPass the bread to the left gripper\n", + "subtask_index": 31 + }, + { + "subtask": "Use the right gripper to grab the bread on the right side of the table\n", + "subtask_index": 32 + 
}, + { + "subtask": "The left gripper places purple garbage bag on the left side of the table", + "subtask_index": 33 + }, + { + "subtask": "Use the right gripper to grab the xx on the right side of the table", + "subtask_index": 34 + }, + { + "subtask": "Pass the milk to the left gripper\n", + "subtask_index": 35 + }, + { + "subtask": "Pass the banana to the left gripper", + "subtask_index": 36 + }, + { + "subtask": "Pass the shampoo to the left gripper\n", + "subtask_index": 37 + }, + { + "subtask": "The left gripper places Rubik's Cube on the left side of the table", + "subtask_index": 38 + }, + { + "subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", + "subtask_index": 39 + }, + { + "subtask": "\nPass the yogurt to the left gripper", + "subtask_index": 40 + }, + { + "subtask": "\nPass the bread to the left gripper", + "subtask_index": 41 + }, + { + "subtask": "Use the right gripper to shampoo the on the right side of the table", + "subtask_index": 42 + }, + { + "subtask": "User the right gripper to grab the grape on the right side of the table\n", + "subtask_index": 43 + }, + { + "subtask": "The left gripper places milk on the left side of the table\n", + "subtask_index": 44 + }, + { + "subtask": "Use the right gripper to grab the yogurt on the right side of the table", + "subtask_index": 45 + }, + { + "subtask": "The left gripper places yogurt on the left side of the table\n", + "subtask_index": 46 + }, + { + "subtask": "Use the right gripper to grab the shampoo on the right side of the table", + "subtask_index": 47 + }, + { + "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", + "subtask_index": 48 + }, + { + "subtask": "\nPass the milk to the left gripper\n", + "subtask_index": 49 + }, + { + "subtask": "Pass the grape to the left gripper", + "subtask_index": 50 + }, + { + "subtask": "Pass the grape to the left gripper\n", + "subtask_index": 51 + }, + { + "subtask": "null", + 
"subtask_index": 52 } ], "atomic_actions": [ - "push" + "grasp", + "lift", + "lower", + "handover", + "takeover" ], "robot_name": [ - "Airbot_MMK2" + "agilex_cobot_magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -30961,23 +30169,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 6897, + "total_episodes": 98, + "total_frames": 69065, "fps": 30, - "total_tasks": 5, - "total_videos": 200, + "total_tasks": 53, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "175.81 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "884.48 MB" }, - "frame_num": 6897, - "dataset_size": "175.81 MB", - "data_structure": "Airbot_MMK2_push_toy_car_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 69065, + "dataset_size": "884.48 MB", + "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -31049,33 +30257,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -31084,42 +30269,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -31128,36 +30303,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -31197,17 +30362,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -31224,10 +30389,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -31244,70 +30409,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -31329,14 +30554,14 @@ "version_info": "Initial 
Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "leju_robot_pass_the_cleaner_d": { - "path": "leju_robot_pass_the_cleaner_d", - "dataset_name": "pass_the_cleaner_d", + "alpha_bot_2_sticker": { + "path": "alpha_bot_2_sticker", + "dataset_name": "sticker", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -31344,7 +30569,7 @@ "pick", "place" ], - "tasks": "Turn the bottle to the front side.", + "tasks": "End", "objects": [ { "object_name": "table", @@ -31355,27 +30580,27 @@ "level5": null }, { - "object_name": "cleaner", - "level1": "daily_necessities", - "level2": "cleaner", + "object_name": "poster", + "level1": "toy", + "level2": "poster", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-168676", - "dataset_size": "8.7GB", + "frame_range": "0-97993", + "dataset_size": "1.9GB", "statistics": { - "total_episodes": 451, - "total_frames": 168676, + "total_episodes": 93, + "total_frames": 97993, "total_tasks": 1, - "total_videos": 1353, + "total_videos": 372, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "d1843bc1-83c4-4c84-8c34-c6740a7e82a9", + "dataset_uuid": "81aee7b0-84c3-4b85-9778-c5c9df188afd", "language": [ "en", "zh" @@ -31384,8 +30609,8 @@ "robotics" ], "sub_tasks": [ - "Turn the bottle to the front side.", - "Pick up the bottle from the table.", + "End", + "Stick the advertisement in the top-right corner area with both grippers", "null" ], "annotations": { @@ -31423,10 +30648,214 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n 
author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_pass_the_cleaner_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_pass_the_cleaner_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "alpha_bot_2_sticker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_sticker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_storage_object_gray_plate": { + "R1_Lite_stack_baskets": { + "path": "R1_Lite_stack_baskets", + "dataset_name": "stack_baskets", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Grasp the dark basket with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-57189", + "dataset_size": "4.1GB", + "statistics": { + "total_episodes": 101, + "total_frames": 57189, + "total_tasks": 1, + "total_videos": 404, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4b7a3d8c-18c0-45af-9337-02adad3a72de", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the dark basket with the right gripper", + "End", + "Place the light basket on the middle of the table with the left gripper", + "Grasp the light basket with the left gripper", + "Place the dark basket into the light color basket with the right gripper", + "Static", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": 
"https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_stack_baskets_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_stack_baskets_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galbot_g1_fold_clothe_b": { + "path": "Galbot_g1_fold_clothe_b", + "dataset_name": "fold_clothe_b", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "flod" + ], + "tasks": "Abnormal", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "clothes", + "level1": "fabric", + "level2": "clothes", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-197328", + "dataset_size": "8.8GB", + "statistics": { + "total_episodes": 213, + "total_frames": 197328, + "total_tasks": 1, + "total_videos": 639, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "0dd63c25-960f-4489-88af-e3c5c38998c2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", + "use the left gripper to clamp the left edge of the fabric", + "Drag the clothes to the center of the table", + "End", + "use both grippers simultaneously to clamp the upper edge of the clothing fabric", + "use both grippers to drag the lower edge of the fabric forward and fold it over the upper edge", + "use both grippers to drag the upper edge of the fabric backward and fold it over the lower edge", + "use both grippers simultaneously to clamp the lower edge of the clothing fabric", + 
"Drag the clothes downward with both gripper", + "use the left gripper to drag the left edge of the fabric to the left and folds it over the right edge", + "Flip the folded clothes over with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, 
YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Galbot_g1_fold_clothe_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_fold_clothe_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_erase_board_left": { "task_categories": [ "robotics" ], @@ -31456,11 +30885,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_gray_plate", + "dataset_name": "Agilex_Cobot_Magic_erase_board_left", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "education", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -31468,217 +30897,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "gray_plate", - "level1": "kitchen_supplies", - "level2": "gray_plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruits", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_pot", - "level1": "kitchen_supplies", - "level2": "blue_pot", - "level3": null, - 
"level4": null, - "level5": null - }, - { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coke", - "level1": "beverages", - "level2": "coke", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "compass", - "level1": "stationery", - "level2": "compass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block_pillar", - "level1": "toys", - "level2": "block_pillar", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "kitchen_supplies", - "level2": "egg_beater", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "eraser", + "object_name": "whiteboard", "level1": "stationery", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", - "level3": null, - "level4": null, - "level5": 
null - }, - { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "level2": "whiteboard", "level3": null, "level4": null, "level5": null }, { - "object_name": "tape", + "object_name": "whiteboard_eraser", "level1": "stationery", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "doll", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", + "level2": "whiteboard_eraser", "level3": null, "level4": null, "level5": null @@ -31686,405 +30923,60 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick the target object and place on the gray plate." + "use the left claw to pick up the whiteboard eraser, wipe the notes on the whiteboard clean, and then put them back in place." 
], "sub_tasks": [ { - "subtask": "Place the tape on the gray plate with the left gripper", + "subtask": "Place the eraser with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the blue pot with the left gripper", + "subtask": "Grasp the eraser with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the plugboard with the left gripper", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Grasp the potato chips with the right gripper", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "Grasp the banana with the left gripper", + "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", "subtask_index": 4 }, - { - "subtask": "Place the round wooden block on the gray plate with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Place the peach on the gray plate with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "Grasp the compasses with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the square chewing gum on the gray plate with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Place the compasses on the gray plate with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the potato chips on the gray plate with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the green lemon on the gray plate with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the blue cup with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Place the square wooden block on the gray plate with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the soft facial cleanser with the left gripper", - "subtask_index": 15 - }, - { - "subtask": "Place the blue cup on the gray plate with the left gripper", - "subtask_index": 16 - }, - { - "subtask": "Grasp the square chewing gum with the left gripper", - 
"subtask_index": 17 - }, - { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 18 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 19 - }, - { - "subtask": "Place the blackboard erasure on the gray plate with the right gripper", - "subtask_index": 20 - }, - { - "subtask": "Grasp the plugboard with the right gripper", - "subtask_index": 21 - }, - { - "subtask": "Grasp the yogurt with the right gripper", - "subtask_index": 22 - }, - { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 23 - }, - { - "subtask": "Place the round bread on the gray plate with the right gripper", - "subtask_index": 24 - }, - { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Place the shower sphere on the gray plate with the left gripper", - "subtask_index": 26 - }, - { - "subtask": "Grasp the chocolate with the right gripper", - "subtask_index": 27 - }, - { - "subtask": "Place the soft facial cleanser on the gray plate with the left gripper", - "subtask_index": 28 - }, - { - "subtask": "Grasp the peach with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 30 - }, - { - "subtask": "Place the shower sphere on the gray plate with the right gripper", - "subtask_index": 31 - }, - { - "subtask": "Grasp the back scratcher with the left gripper", - "subtask_index": 32 - }, - { - "subtask": "Place the square wooden block on the gray plate with the right gripper", - "subtask_index": 33 - }, - { - "subtask": "Place the blackboard erasure on the gray plate with the left gripper", - "subtask_index": 34 - }, - { - "subtask": "Place the yogurt on the gray plate with the right gripper", - "subtask_index": 35 - }, - { - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 36 - }, - { - "subtask": "Place the coke on the gray plate with the left 
gripper", - "subtask_index": 37 - }, - { - "subtask": "Place the chocolate cake on the gray plate with the right gripper", - "subtask_index": 38 - }, - { - "subtask": "Place the banana on the gray plate with the left gripper", - "subtask_index": 39 - }, - { - "subtask": "Grasp the green lemon with the right gripper", - "subtask_index": 40 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 41 - }, - { - "subtask": "Place the bread slice on the gray plate with the right gripper", - "subtask_index": 42 - }, - { - "subtask": "Place the brown towel on the gray plate with the right gripper", - "subtask_index": 43 - }, - { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 44 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 45 - }, - { - "subtask": "End", - "subtask_index": 46 - }, - { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 47 - }, - { - "subtask": "Grasp the coke with the left gripper", - "subtask_index": 48 - }, - { - "subtask": "Place the plugboard on the gray plate with the right gripper", - "subtask_index": 49 - }, - { - "subtask": "Place the round bread on the gray plate with the left gripper", - "subtask_index": 50 - }, - { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 51 - }, - { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 52 - }, - { - "subtask": "Place the round wooden block on the gray plate with the right gripper", - "subtask_index": 53 - }, - { - "subtask": "Place the blue pot on the gray plate with the left gripper", - "subtask_index": 54 - }, - { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 55 - }, - { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 56 - }, - { - "subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 57 - }, - { - "subtask": "Place 
the brown towel on the gray plate with the left gripper", - "subtask_index": 58 - }, - { - "subtask": "Place the blue pot on the gray plate with the right gripper", - "subtask_index": 59 - }, - { - "subtask": "Place the compasses on the gray plate with the left gripper", - "subtask_index": 60 - }, - { - "subtask": "Place the coke on the gray plate with the right gripper", - "subtask_index": 61 - }, - { - "subtask": "Grasp the tin with the right gripper", - "subtask_index": 62 - }, - { - "subtask": "Place the square chewing gum on the gray plate with the left gripper", - "subtask_index": 63 - }, - { - "subtask": "Place the back scratcher on the gray plate with the left gripper", - "subtask_index": 64 - }, - { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 65 - }, - { - "subtask": "Place the tin on the gray plate with the right gripper", - "subtask_index": 66 - }, - { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 67 - }, - { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 68 - }, - { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 69 - }, - { - "subtask": "Place the tape on the gray plate with the right gripper", - "subtask_index": 70 - }, - { - "subtask": "Place the hard facial cleanser on the gray plate with the left gripper", - "subtask_index": 71 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 72 - }, - { - "subtask": "Place the duck toy on the gray plate with the right gripper", - "subtask_index": 73 - }, - { - "subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 74 - }, - { - "subtask": "Place the chocolate on the gray plate with the right gripper", - "subtask_index": 75 - }, - { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 76 - }, - { - "subtask": "Place the banana on the gray plate with the right gripper", - "subtask_index": 77 - }, - { - 
"subtask": "Place the peach on the gray plate with the right gripper", - "subtask_index": 78 - }, - { - "subtask": "Grasp the round bread with the left gripper", - "subtask_index": 79 - }, - { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 80 - }, - { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 81 - }, - { - "subtask": "Place the chocolate cake on the gray plate with the left gripper", - "subtask_index": 82 - }, - { - "subtask": "Place the duck toy on the gray plate with the left gripper", - "subtask_index": 83 - }, - { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 84 - }, - { - "subtask": "Place the plugboard on the gray plate with the left gripper", - "subtask_index": 85 - }, - { - "subtask": "Place the bread slice on the gray plate with the left gripper", - "subtask_index": 86 - }, - { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 87 - }, - { - "subtask": "Place the potato chips on the gray plate with the left gripper", - "subtask_index": 88 - }, - { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 89 - }, - { - "subtask": "Place the blue cup on the gray plate with the right gripper", - "subtask_index": 90 - }, { "subtask": "null", - "subtask_index": 91 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "wipe", + "handover" ], "robot_name": [ - "Galaxea_R1_Lite" + "Agilex_Cobot_Magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, 
resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -32095,53 +30987,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 102, - "total_frames": 20357, + "total_episodes": 49, + "total_frames": 23847, "fps": 30, - "total_tasks": 92, - "total_videos": 408, + "total_tasks": 6, + "total_videos": 147, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "786.26 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "217.68 MB" }, - "frame_num": 20357, - "dataset_size": "786.26 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_gray_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- 
episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 23847, + "dataset_size": "217.68 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:101" + "train": "0:48" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -32150,8 +31019,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -32163,8 +31032,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -32173,8 +31042,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -32186,8 +31055,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -32196,8 +31065,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -32209,7 +31078,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -32218,20 +31087,32 @@ 
"left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -32240,14 +31121,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -32287,17 +31180,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -32314,10 +31207,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -32334,130 +31227,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, 
"eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" + ] }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" ], - "dtype": "int32" + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + 
"left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -32479,494 +31372,393 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_move_fake_food": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "R1_Lite_connect_the_router_cable": { + "path": "R1_Lite_connect_the_router_cable", + "dataset_name": "connect_the_router_cable", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "insert" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_fake_food", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Hand it to the other hand", "objects": [ { - "object_name": "early_education_toys", - "level1": "toys", - "level2": "early_education_toys", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "put the toy food on the toy table with your right hand." 
- ], - "sub_tasks": [ - { - "subtask": "End", - "subtask_index": 0 }, { - "subtask": "Grasp the small bowl of canned food with right gripper", - "subtask_index": 1 + "object_name": "router", + "level1": "electric_appliance", + "level2": "router", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the small bowl of canned food on the table with the right gripper", - "subtask_index": 2 + "object_name": "socket", + "level1": "electric_appliance", + "level2": "socket", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Abnormal", - "subtask_index": 3 + "object_name": "cable", + "level1": "electric_appliance", + "level2": "cable", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "null", - "subtask_index": 4 + "object_name": "plug", + "level1": "electric_appliance", + "level2": "plug", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", 
- "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], + "operation_platform_height": null, + "frame_range": "0-165617", + "dataset_size": "9.4GB", "statistics": { - "total_episodes": 50, - "total_frames": 7610, - "fps": 30, - "total_tasks": 5, - "total_videos": 200, + "total_episodes": 105, + "total_frames": 165617, + "total_tasks": 1, + "total_videos": 315, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "313.62 MB" + "fps": 30 }, - "frame_num": 7610, - "dataset_size": "313.62 MB", - "data_structure": "Airbot_MMK2_move_fake_food_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:49" + "dataset_uuid": "2802db0f-0c22-4fba-b305-aae14b28e492", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Hand it to the other hand", + "Pick up plug", + "Pick up the cable", + "Close the lower drawer", + "Place the iced tea in the fridge", + "Open the upper fridge door", + "Plug it into the socket", + "End", + "Place zucchini in the lower drawer", + "Open the lower drawer", + "Deliver it to the other hand", + "Abnormal", + "Plug plug into socket", + "Open the plastic bag", + "Place pear in the lower drawer", + "Place the Wahaha in the fridge", + "Pick up cable", + "Close the middle refrigerator door", + "Open the middle refrigerator door", + "Place king oyster Mushroom in the lower drawer", + "Plug cable into router interface", + "Align cable and router interface", + "Close the upper refrigerator door", + "Align plug and socket", + "Plug it into the receiver", + "Pick up the plug", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - 
"observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang 
Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_connect_the_router_cable_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_connect_the_router_cable_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_pour_drink": { + "path": "Cobot_Magic_pour_drink", + "dataset_name": "pour_drink", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" + ], + "tasks": "Grasp the black cup with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - 
"right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { 
- "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "NFC_orange_juice", + "level1": "drink", + "level2": "NFC_orange_juice", + "level3": null, + "level4": null, + "level5": null }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "red_wine", + "level1": "drink", + "level2": "red_wine", + "level3": null, + "level4": null, + "level5": null }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" + { + "object_name": "sprite", + "level1": "drink", + "level2": "sprite", + "level3": null, + "level4": null, + "level5": null }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" + { + "object_name": "black_mug", + "level1": "container", + "level2": "black_mug", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "paper_cup", + "level1": "container", + "level2": "paper_cup", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "transparent_cup", + "level1": "container", + "level2": "transparent_cup", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_state": { 
- "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cestbon", + "level1": "drink", + "level2": "cestbon", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "coffee", + "level1": "drink", + "level2": "coffee", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-862292", + "dataset_size": "12.6GB", + "statistics": { + "total_episodes": 1613, + "total_frames": 862292, + "total_tasks": 61, + "total_videos": 4839, + "total_chunks": 2, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "9b6b4d09-0c15-4d1f-b8f9-12be94c871cd", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the black cup with 
left gripper", + "Grasp the white cup with right gripper", + "Static", + "Place the orange juice bottle on the table with left gripper", + "Lift the black cup with left gripper", + "Place the water bottle on the table with right gripper", + "Place the grey cup on the table with left gripper", + "End", + "Place the sprite bottle on the table with right gripper", + "Lift the red cup with left gripper", + "Place the cola bottle on the table with left gripper", + "Grasp the bottle with cola with right gripper", + "Grasp the bottle with sprite with left gripper", + "Pour the water from bottle to cup with right gripper", + "Place the coffee bottle on the table with right gripper", + "Grasp the bottle with orange juice with left gripper", + "Pour the red wine from bottle to cup with left gripper", + "Place the black cup in the center of view with right gripper", + "Place the transparent cup on the table with right gripper", + "Place the white cup on the table with right gripper", + "Grasp the bottle filled water with right gripper", + "Grasp the bottle with coffee with right gripper", + "Place the yellow paper cup on the table with left gripper", + "Pour the yuexian Milk from bottle to cup with right gripper", + "Grasp the bottle with red wine with left gripper", + "Grasp the black cup with right gripper", + "Place the white cup on the table with left gripper", + "Grasp the yellow paper cup with right gripper", + "Grasp the yellow paper cup with left gripper", + "Pour the orange juice from bottle to cup with left gripper", + "Grasp the bottle with water with right gripper", + "Pour the yogurt from bottle to cup with left gripper", + "Grasp the bottle with yuexian Milk with right gripper", + "Place the yellow paper cup on the table with right gripper", + "Pour the cola from bottle to cup with right gripper", + "Abnormal", + "Grasp the white cup with left gripper", + "Pour the sprite from bottle to cup with left gripper", + "Lift the grey cup with left gripper", + "Place 
the black cup on the table with left gripper", + "Grasp the red cup with left gripper", + "Place the red cup on the table with left gripper", + "Lift the yellow paper cup with right gripper", + "Grasp the grey cup with left gripper", + "Place the yuexian Milk bottle on the table with right gripper", + "Lift the yellow paper cup with left gripper", + "Place the sprite bottle on the table with left gripper", + "Lift the white cup with left gripper", + "Place the cola bottle on the table with right gripper", + "Pour the sprite from bottle to cup with right gripper", + "Place the red wine bottle on the table with left gripper", + "Lift the white cup with right gripper", + "Grasp the transparent cup with right gripper", + "Grasp the bottle with sprite with right gripper", + "Pour the coffee from bottle to cup with right gripper", + "Lift the grey cup with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub 
repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng 
Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_pour_drink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── 
episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", + "structure": "Cobot_Magic_pour_drink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ 
│ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" }, - "Galbot_g1_steamer_storage_baozi_h": { - "path": "Galbot_g1_steamer_storage_baozi_h", - "dataset_name": "steamer_storage_baozi_h", + "R1_Lite_wash_the_tableware": { + "path": "R1_Lite_wash_the_tableware", + "dataset_name": "wash_the_tableware", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -32977,7 +31769,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Grasp yellow bowl with right gripper", "objects": [ { "object_name": "table", @@ -32988,32 +31780,40 @@ "level5": null }, { - "object_name": "baozi", - "level1": "food", - "level2": "baozi", + "object_name": "sink", + "level1": "container", + "level2": "sink", "level3": null, "level4": null, "level5": null }, { - "object_name": "steamer", - "level1": "cookware", - "level2": "steamer", + "object_name": "chopsticks", + "level1": "tableware", + "level2": "chopsticks", "level3": null, "level4": 
null, "level5": null }, { - "object_name": "pot_lid", - "level1": "daily_necessities", - "level2": "pot_lid", + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { "object_name": "plate", - "level1": "kitchen_supplies", + "level1": "container", "level2": "plate", "level3": null, "level4": null, @@ -33021,18 +31821,18 @@ } ], "operation_platform_height": null, - "frame_range": "0-979757", - "dataset_size": "18.2GB", + "frame_range": "0-197672", + "dataset_size": "7.3GB", "statistics": { - "total_episodes": 996, - "total_frames": 979757, + "total_episodes": 137, + "total_frames": 197672, "total_tasks": 1, - "total_videos": 2988, + "total_videos": 411, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "23188104-2751-4169-b13b-286381a44645", + "dataset_uuid": "62b4f49b-4413-4d29-9822-c440835cdf41", "language": [ "en", "zh" @@ -33041,11 +31841,46 @@ "robotics" ], "sub_tasks": [ + "Grasp yellow bowl with right gripper", + "Pick up the bowl", + "Open the dispenser valve with right gripper", "End", - "Place the baozi on the steamer with right gripper", - "Place the pot lid on the steamer with left gripper", - "Grasp the pot lid with left gripper", - "Grasp the baozi in the plate with right gripper", + "Rinse the chopsticks", + "Put the chopsticks into the sink", + "Place the chopsticks on the table", + "Put a spoon back to the table from the sink", + "Put a bowl back to the table from the sink", + "Place pink bowl into the sink with right gripper", + "Rinse the plate", + "Wait for the water to rinse the bowl", + "Place a chopstick into the sink with right gripper", + "Put a bowl into the sink", + "Rinse the bowl", + "Place a chopstick on the bowl with right gripper", + "Place the plate on the table", + "Pour the water from the yellow bowl into the sink 
with right gripper", + "Place the bowl on the table", + "Pick up the chopsticks", + "Put the chopsticks back to the table from the sink", + "Put a spoon into the sink", + "Pour the water from the pink bowl into the sink with right gripper", + "Place the chopsticks on the bowl", + "Abnormal", + "Pick up the spoon", + "Rinse the spoon", + "Place yellow bowl on the table with right gripper", + "Place yellow bowl into the sink with right gripper", + "Place the spoon on the plate", + "Put a plate back to the table from the sink", + "Turn off the faucet", + "Pick up the plate", + "Place the spoon on the bowl", + "Close the dispenser valve with right gripper", + "Grasp a chopstick with right gripper", + "Grasp pink bowl with right gripper", + "Turn on the faucet", + "Place pink bowl on the table with right gripper", + "Put a plate into the sink", "null" ], "annotations": { @@ -33083,549 +31918,70 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di 
Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Galbot_g1_steamer_storage_baozi_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_steamer_storage_baozi_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── 
chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_wash_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_wash_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_pull_tissue": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "AIRBOT_MMK2_store_peaches_and_pears": { + "path": "AIRBOT_MMK2_store_peaches_and_pears", + "dataset_name": "store_peaches_and_pears", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_pull_tissue", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Place the pear into the right compartment of the storage box with the right gripper", "objects": [ { - "object_name": "tissue_paper", - "level1": "kitchen_supplies", - "level2": "paper_towels", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "tissue_box", - "level1": "kitchen_supplies", - "level2": "paper_towels", + "object_name": "peache", + "level1": "fruit", + "level2": "peache", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "take a tissue paper out of tissue box by hand." 
- ], - "sub_tasks": [ - { - "subtask": "Place the tissue on the table with the right gripper", - "subtask_index": 0 }, { - "subtask": "End", - "subtask_index": 1 + "object_name": "pear", + "level1": "fruit", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pull out a piece of tissue with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "Abnormal", - "subtask_index": 3 - }, - { - "subtask": "Grasp the a piece of tissue with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "null", - "subtask_index": 5 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 49, - "total_frames": 11531, - "fps": 30, - "total_tasks": 6, - "total_videos": 196, - "total_chunks": 1, - "chunks_size": 
1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "405.79 MB" - }, - "frame_num": 11531, - "dataset_size": "405.79 MB", - "data_structure": "Airbot_MMK2_pull_tissue_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:48" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - 
"observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - 
"left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - 
"right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "leju_robot_pass_the_cleaner_b": { - "path": "leju_robot_pass_the_cleaner_b", - "dataset_name": "pass_the_cleaner_b", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Turn the bottle to the front side.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cleaner", - "level1": "daily_necessities", - "level2": "cleaner", + "object_name": "compartment", + "level1": "container", + "level2": "compartment", "level3": null, "level4": 
null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-164222", - "dataset_size": "8.5GB", + "operation_platform_height": 77.2, + "frame_range": "0-11276", + "dataset_size": "343.0MB", "statistics": { - "total_episodes": 441, - "total_frames": 164222, + "total_episodes": 48, + "total_frames": 11276, "total_tasks": 1, - "total_videos": 1323, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "ceb2e30d-5b33-4f0f-b9e0-a88ee69d8e21", + "dataset_uuid": "c95d9ed0-71ed-4c90-94ec-30af0eb88215", "language": [ "en", "zh" @@ -33634,8 +31990,12 @@ "robotics" ], "sub_tasks": [ - "Turn the bottle to the front side.", - "Pick up the bottle from the table.", + "Place the pear into the right compartment of the storage box with the right gripper", + "Place the peach into the left compartment of the storage box with the left gripper", + "Abnormal", + "Grasp a peach with the left gripper", + "End", + "Grasp a pear with the right gripper", "null" ], "annotations": { @@ -33673,70 +32033,52 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, 
Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_pass_the_cleaner_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_pass_the_cleaner_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_store_peaches_and_pears_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_store_peaches_and_pears_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_place_the_test_tube": { - "path": "Cobot_Magic_place_the_test_tube", - "dataset_name": "place_the_test_tube", + "G1edu-u3_pullBowl_storage_bread_b": { + "path": "G1edu-u3_pullBowl_storage_bread_b", + "dataset_name": "pullBowl__storage2bread_unordered", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ - "grasp", - "pick", - "place" + "walk" ], - "tasks": "Place the test tubes on the test tube rack with the right gripper", + 
"tasks": "Move the pink bowl to the center of table with right hand", "objects": [ { - "object_name": "table", + "object_name": "chair", "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube", - "level1": "tool", - "level2": "test_tube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "tool", - "level2": "test_tube_rack", + "level2": "chair", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "bear_doll", + "level1": "toy", + "level2": "bear_doll", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-308346", - "dataset_size": "6.0GB", + "frame_range": "0-162960", + "dataset_size": "3.3GB", "statistics": { - "total_episodes": 364, - "total_frames": 308346, - "total_tasks": 6, - "total_videos": 1092, + "total_episodes": 186, + "total_frames": 162960, + "total_tasks": 1, + "total_videos": 558, "total_chunks": 1, "chunks_size": 1000, - "fps": 50 + "fps": 30 }, - "dataset_uuid": "070ad40e-7f20-4d3e-82d6-9cb8c0fb1f74", + "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a48", "language": [ "en", "zh" @@ -33745,18 +32087,15 @@ "robotics" ], "sub_tasks": [ - "Place the test tubes on the test tube rack with the right gripper", - "Lift the test tube with the left hand.", + "Move the pink bowl to the center of table with right hand", + "Static", + "Grasp the round bread with left hand", "End", - "Pass the test tube from the left gripper to the right gripper", - "Pick up the test tubes from the table", - "Place the test tubes on the rack", - "Hand over the test tubes", - "Grab the test tube with the left hand.", - "Receive the test tube with the right hand.", - "Pick up the test tube with the left gripper", - "Insert it into the hole at the intersection of the 5th row from the top and 
the 5th column from the left with the right hand.", - "abnormal", + "Move towards the toy bear on the chair", + "Grasp the long bread with left hand", + "Place the round bread in pink bowl with left hand", + "Abnormal", + "Place the long bread in pink bowl with left hand", "null" ], "annotations": { @@ -33794,24 +32133,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_place_the_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_place_the_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── 
(...)" + "data_schema": "G1edu-u3_pullBowl_storage_bread_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pullBowl_storage_bread_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galbot_g1_fold_clothe_c": { - "path": "Galbot_g1_fold_clothe_c", - "dataset_name": "fold_clothe_c", + "AIRBOT_MMK2_the_cup_is_put_into_the_bucket": { + "path": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket", + "dataset_name": "the_cup_is_put_into_the_bucket", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place", - "flod" + "open", + "hold" ], - "tasks": "Abnormal", + "tasks": "Lift the cup the right gripper", "objects": [ { "object_name": "table", @@ -33822,27 +32160,35 @@ "level5": null }, { - "object_name": "clothes", - "level1": "fabric", - "level2": "clothes", + "object_name": "paper_cup", + "level1": "container", + "level2": "paper_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bucket", + "level1": "container", + "level2": "bucket", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-516810", - "dataset_size": "22.3GB", + "operation_platform_height": 77.2, + "frame_range": "0-9335", + "dataset_size": "369.4MB", "statistics": { - "total_episodes": 666, - "total_frames": 516810, + "total_episodes": 50, + "total_frames": 9335, "total_tasks": 1, - "total_videos": 1998, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "9c9c15eb-08d9-4119-b551-ce2439643f0f", + "dataset_uuid": "3d103654-86bc-4dae-b569-be335fe5d8df", "language": [ "en", "zh" @@ -33851,17 +32197,14 @@ "robotics" ], "sub_tasks": [ - "Abnormal", - "use the left gripper to clamp the left edge of the 
fabric", - "Drag the clothes to the center of the table", + "Lift the cup the right gripper", + "Static", "End", - "use both grippers simultaneously to clamp the upper edge of the clothing fabric", - "use both grippers to drag the lower edge of the fabric forward and fold it over the upper edge", - "use both grippers to drag the upper edge of the fabric backward and fold it over the lower edge", - "use both grippers simultaneously to clamp the lower edge of the clothing fabric", - "Drag the clothes downward with both gripper", - "use the left gripper to drag the left edge of the fabric to the left and folds it over the right edge", - "Flip the folded clothes over with left gripper", + "Grasp the cup the right gripper", + "Lift the cup the left gripper", + "Grasp the cup the left gripper", + "place the cup in the yellow basket use the right gripper", + "place the cup in the yellow basket use the left gripper", "null" ], "annotations": { @@ -33899,15 +32242,15 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Galbot_g1_fold_clothe_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_fold_clothe_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + 
"structure": "AIRBOT_MMK2_the_cup_is_put_into_the_bucket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_open_the_food_pan": { - "path": "R1_Lite_open_the_food_pan", - "dataset_name": "open_the_food_pan", + "leju_robot_box_storage_parcel_d": { + "path": "leju_robot_box_storage_parcel_d", + "dataset_name": "box_storage_parcel_d", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ @@ -33915,7 +32258,7 @@ "pick", "place" ], - "tasks": "Hold the pot lid with the right arm", + "tasks": "Pass the laundry detergent to the left gripper", "objects": [ { "object_name": "table", @@ -33926,126 +32269,43 @@ "level5": null }, { - "object_name": "pan", - 
"level1": "container", - "level2": "pan", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-74981", - "dataset_size": "2.6GB", - "statistics": { - "total_episodes": 94, - "total_frames": 74981, - "total_tasks": 1, - "total_videos": 282, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "41f586d5-51f7-456a-b30f-ca5a7e7555c2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hold the pot lid with the right arm", - "Close the pot lid", - "Open the pot lid completely", - "Hold the pot lid with the left arm", - "Open the food pot lid a small opening with the right arm", - "Open the food pot lid a small opening with the left arm", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang 
Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_open_the_food_pan_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_open_the_food_pan_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_pass_the_cleaner_e": { - "path": "leju_robot_pass_the_cleaner_e", - "dataset_name": "pass_the_cleaner_e", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Turn the bottle to the front side.", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "parcel", + "level1": "container", + "level2": "parcel", "level3": null, "level4": null, "level5": null }, { - "object_name": "cleaner", - "level1": "daily_necessities", - "level2": "cleaner", + "object_name": 
"conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-165558", - "dataset_size": "8.6GB", + "frame_range": "0-288956", + "dataset_size": "18.0GB", "statistics": { - "total_episodes": 430, - "total_frames": 165558, + "total_episodes": 212, + "total_frames": 288956, "total_tasks": 1, - "total_videos": 1290, + "total_videos": 636, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5857e16d-9974-4991-a27c-549f3ba35f03", + "dataset_uuid": "a24f0c02-18b7-4770-8ed1-41d5db2ba6d8", "language": [ "en", "zh" @@ -34054,8 +32314,13 @@ "robotics" ], "sub_tasks": [ - "Turn the bottle to the front side.", - "Pick up the bottle from the table.", + "Pass the laundry detergent to the left gripper", + "Pick up the fast-moving consumer goods bottle from the table.", + "Take the fast-moving consumer goods bottle from the table.", + "Abnormal", + "Grasp the laundry detergent with right gripper", + "Place the laundry detergent on the yellow rubber mat with left gripper", + "Flip the fast-moving consumer goods bottle to the front side.", "null" ], "annotations": { @@ -34093,12 +32358,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng 
Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_pass_the_cleaner_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_pass_the_cleaner_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_box_storage_parcel_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "RMC-AIDA-L_basket_storage_orange": { - "path": "RMC-AIDA-L_basket_storage_orange", - "dataset_name": "basket_storage_orange", + "Cobot_Magic_move_the_cup": { + "path": "Cobot_Magic_move_the_cup", + "dataset_name": "move_the_cup", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -34109,7 +32374,7 @@ "pick", "place" ], - "tasks": "Place the dark basket in the center of view with the right gripper", + "tasks": "Grasp the transparent glass cup", "objects": [ { "object_name": "table", @@ -34120,35 +32385,35 @@ "level5": null }, { - "object_name": "basket", + "object_name": "glass_cup", "level1": "container", - "level2": "basket", + "level2": "glass_cup", "level3": 
null, "level4": null, "level5": null }, { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", + "object_name": "towel", + "level1": "clothing", + "level2": "towel", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-112423", + "frame_range": "0-28338", "dataset_size": "1.1GB", "statistics": { - "total_episodes": 356, - "total_frames": 112423, - "total_tasks": 4, - "total_videos": 1068, + "total_episodes": 100, + "total_frames": 28338, + "total_tasks": 1, + "total_videos": 300, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "f96c79dc-15ee-43ed-8c21-1e898a816ee6", + "dataset_uuid": "503e7d0b-c281-4ea6-98ff-ebe74764c6ad", "language": [ "en", "zh" @@ -34157,21 +32422,8 @@ "robotics" ], "sub_tasks": [ - "Place the dark basket in the center of view with the right gripper", - "Place the light basket in the center of view with the right gripper", - "Place the orange into the dark basket with the left gripper", - "Place the dark basket in the center of view with the left gripper", - "end", - "Place the orange into the light basket with the left gripper", - "Pick up the light basket with the right gripper", - "Place the orange into the light basket with the right gripper", - "Pick up the orange with the right gripper", - "Place the light basket in the center of view with the left gripper", - "Pick up the dark basket with the left gripper", - "Pick up the light basket with the left gripper", - "Pick up the dark basket with the right gripper", - "Pick up the orange with the left gripper", - "Place the orange into the dark basket with the right gripper", + "Grasp the transparent glass cup", + "Place the glass cup on the blue zone", "null" ], "annotations": { @@ -34209,24 +32461,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan 
Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n 
│ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_move_the_cup_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_move_the_cup_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Split_aloha_scoop_coffee_beans": { - "path": "Split_aloha_scoop_coffee_beans", - "dataset_name": "scoop_coffee_beans", + "R1_Lite_storage_of_toiletries": { + "path": 
"R1_Lite_storage_of_toiletries", + "dataset_name": "storage_of_toiletries", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "scoop", "grasp", "pick", "place" ], - "tasks": "Place the spoon back in its original place", + "tasks": "Place the toothpaste in the storage bag", "objects": [ { "object_name": "table", @@ -34237,51 +32488,51 @@ "level5": null }, { - "object_name": "cup", + "object_name": "storage_bag", "level1": "container", - "level2": "cup", + "level2": "storage_bag", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup_lid", - "level1": "container", - "level2": "cup_lid", + "object_name": "toothpaste", + "level1": "personal_care_products", + "level2": "toothpaste", "level3": null, "level4": null, "level5": null }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "toothbrush", + "level1": "personal_care_products", + "level2": "toothbrush", "level3": null, "level4": null, "level5": null }, { - "object_name": "coffee_beans", - "level1": "food", - "level2": "coffee_beans", + "object_name": "comb", + "level1": "personal_care_products", + "level2": "comb", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-409440", - "dataset_size": "3.8GB", + "operation_platform_height": null, + "frame_range": "0-138432", + "dataset_size": "7.0GB", "statistics": { - "total_episodes": 499, - "total_frames": 409440, - "total_tasks": 8, - "total_videos": 1497, + "total_episodes": 101, + "total_frames": 138432, + "total_tasks": 1, + "total_videos": 303, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "643f286d-7021-4fb1-8c63-aecd24fb763e", + "dataset_uuid": "25f2cd96-c284-44a9-b59c-98e204e16347", "language": [ "en", "zh" @@ -34290,11 +32541,19 @@ "robotics" ], "sub_tasks": [ - "Place the spoon back in its original place", - "Move the cup to the center of view", - "Pick up the spoon", 
- "Scoop a spoonful of beans into the cup", + "Place the toothpaste in the storage bag", + "Pick up the comb", + "Place the comb on the table", + "Place the comb into the storage bag", + "Pick up the storage bag", + "Pick up a toothbrush", + "Place the toothpaste on the table", "abnormal", + "Pick up the toothpaste", + "Open the storage bag", + "Close the storage bag and place it on the desk", + "Place the toothbrush in the storage bag", + "Hand it to the other hand", "null" ], "annotations": { @@ -34332,12 +32591,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Split_aloha_scoop_coffee_beans_qced_hardlink/\n├── annotations/\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_scoop_coffee_beans_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_storage_of_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_storage_of_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_put_on_a_garbage_bag": { - "path": "R1_Lite_put_on_a_garbage_bag", - "dataset_name": "put_on_a_garbage_bag", + "AgiBot-g1_storage_item_d": { + "path": "AgiBot-g1_storage_item_d", + "dataset_name": "storage_item_d", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -34345,49 +32604,57 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "Abnormal", + "tasks": "Grab and pick up both the mouse and power cord from the accessory packaging area at the same time", "objects": [ { - "object_name": "trash_bag", - "level1": "container", - "level2": "trash_bag", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "trash_can", + "object_name": "data_cable", + "level1": "tool", + "level2": "data_cable", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", "level1": "container", - "level2": "trash_can", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "trash_can_ring", - "level1": "daily_necessities", - "level2": "trash_can_ring", + "object_name": "mouse", + "level1": "tool", + "level2": "mouse", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-252689", - "dataset_size": "11.0GB", + "frame_range": "0-180148", + "dataset_size": "86.0GB", 
"statistics": { - "total_episodes": 133, - "total_frames": 252689, + "total_episodes": 450, + "total_frames": 180148, "total_tasks": 1, - "total_videos": 399, + "total_videos": 3600, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "c689a888-c8c9-4a05-a250-680b40218803", + "dataset_uuid": "49f10ba2-bad7-46fc-ae4b-f6142efa81a2", "language": [ "en", "zh" @@ -34396,19 +32663,8 @@ "robotics" ], "sub_tasks": [ - "Abnormal", - "End", - "Grasp a new trash bag with the right gripper", - "put the new garbage bag on the trash can", - "Place the trash can ring on the trash can with the right gripper", - "Pick up the trash bag", - "Grasp the trash bag with the right gripper", - "place the trash can ring on the floor with right gripper", - "Place the trash bag on the floor with the right gripper", - "Place the trash can ring on the floor", - "Pick up a new trash bag", - "Place the trash can ring on the trash can", - "Place the trash bag on the floor", + "Grab and pick up both the mouse and power cord from the accessory packaging area at the same time", + "Place the mouse and power cord into the box", "null" ], "annotations": { @@ -34446,10 +32702,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, 
Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_storage_item_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_storage_item_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_and_take_cake_plate": { + "Agilex_Cobot_Magic_erase_board_passing_right_to_left": { "task_categories": [ "robotics" ], @@ -34479,11 +32735,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_and_take_cake_plate", + "dataset_name": "Agilex_Cobot_Magic_erase_board_passing_right_to_left", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "education", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -34491,17 +32747,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": 
"table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "whiteboard", + "level1": "stationery", + "level2": "whiteboard", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", "level3": null, "level4": null, "level5": null @@ -34509,31 +32773,31 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the cake into the plate with left hand and take it out with right hand." + "use the right gripper to place the eraser in the left, use the left gripper to pick up the eraser, wipe the notes on the whiteboard clean, and then put it down." ], "sub_tasks": [ { - "subtask": "Place the cake on the table with the right gripper", + "subtask": "Place the eraser with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the cake with the left gripper", + "subtask": "Grasp the eraser with the left gripper", "subtask_index": 1 }, { - "subtask": "Static", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Place the cake into the white plate with the left gripper", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "Grasp the cake on the plate with the right gripper", + "subtask": "Move the eraser to the left side of the blackboard with the right gripper", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", "subtask_index": 5 }, { @@ -34543,31 +32807,30 @@ ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "wipe", + "handover" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset 
temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -34578,23 +32841,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 9782, + "total_episodes": 47, + "total_frames": 38184, "fps": 30, "total_tasks": 7, - "total_videos": 200, + "total_videos": 141, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "487.34 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "334.71 MB" }, - "frame_num": 9782, - "dataset_size": "487.34 MB", - "data_structure": "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- 
episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:49" + "frame_num": 38184, + "dataset_size": "334.71 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_passing_right_to_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -34666,33 +32929,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -34701,42 +32941,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" 
+ "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -34745,36 +32975,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -34814,17 +33034,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -34841,10 +33061,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, 
"eef_sim_pose_action": { "names": [ @@ -34861,70 +33081,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -34946,254 +33226,9 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "leju_robot_hotel_services_f": { - "path": "leju_robot_hotel_services_f", - "dataset_name": "hotel_services_f", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Hand the room key to the person.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": "card", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-95863", - "dataset_size": "6.3GB", - "statistics": { - "total_episodes": 135, - "total_frames": 95863, - "total_tasks": 1, - "total_videos": 405, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "9c2a5a4c-c510-4e80-983e-593589a8eec6", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hand the room key to the person.", - "Place the ID card on the card reader.", - "Take the ID card from the person's hand.", - "Pick up the room key from the key card box.", - "Hand the ID card to the person.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - 
"gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ 
├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_pullBowl_storage_bread_unordered_C": { - "path": "G1edu-u3_pullBowl_storage_bread_unordered_C", - "dataset_name": "pullBowl_storage_bread_unordered_C", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the long bread in pink bowl with left hand", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "donut", - "level1": "food", - "level2": "donut", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-110085", - "dataset_size": "2.2GB", - "statistics": { - "total_episodes": 190, - "total_frames": 110085, - "total_tasks": 1, - "total_videos": 570, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a51", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the long bread in pink bowl with left hand", - "End", - "Grasp the round bread with left hand", - "Grasp the long bread with 
left hand", - "Place the round bread in pink bowl with left hand", - "Move the pink bowl to the center of table with right hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong 
Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_C_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pullBowl_storage_bread_unordered_C_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Realman_RMC-AIDA-L_storage_block_basket": { + "Galaxea_R1_Lite_classify_object_five": { "task_categories": [ "robotics" ], @@ -35223,11 +33258,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Realman_RMC-AIDA-L_storage_block_basket", + "dataset_name": "Galaxea_R1_Lite_classify_object_five", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -35235,25 +33270,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "building_block", - "level1": "toy", - "level2": "building_block", + "object_name": "any_fruits", + "level1": "fruits", + "level2": 
"any_fruits", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_vegetables", + "level1": "vegetables", + "level2": "any_vegetables", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_bread", + "level1": "bread", + "level2": "any_bread", "level3": null, "level4": null, "level5": null @@ -35261,28 +33320,240 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the left gripper grasp the basket on the table, the right grippe pick up the blocks on the table and place it into the basket." + "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Grasp the rubiks cube and put it in the left basket", "subtask_index": 0 }, { - "subtask": "Grasp the blue cube with the right gripper", + "subtask": "Grasp the waffle and put it in the right basket", "subtask_index": 1 }, { - "subtask": "Place the blue cube into the basket with the right gripper", + "subtask": "Grasp the soft cleanser and put it in the left basket", "subtask_index": 2 }, { - "subtask": "Grasp the basket with the left gripper", + "subtask": "Grasp the back scratcher and put it in the left basket", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "Grasp the apple and put it in the right basket", "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + "subtask_index": 6 + }, + { + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 7 + }, + { + "subtask": "Grasp the power strip and put it in the left basket", + "subtask_index": 8 + }, + { + 
"subtask": "Grasp the cleaning agent and put it in the left basket", + "subtask_index": 9 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 10 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 11 + }, + { + "subtask": "Grasp the duck toys and put it in the left basket", + "subtask_index": 12 + }, + { + "subtask": "Grasp the triangle cake and put it in the right basket", + "subtask_index": 13 + }, + { + "subtask": "Grasp the compass and put it in the right basket", + "subtask_index": 14 + }, + { + "subtask": "Grasp the cookie and put it in the right basket", + "subtask_index": 15 + }, + { + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 16 + }, + { + "subtask": "Grasp the orange and put it in the right basket", + "subtask_index": 17 + }, + { + "subtask": "Grasp the ballpoint pen and put it in the left basket", + "subtask_index": 18 + }, + { + "subtask": "Grasp the round bread and put it in the right basket", + "subtask_index": 19 + }, + { + "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask_index": 20 + }, + { + "subtask": "Grasp the lemon and put it in the right basket", + "subtask_index": 21 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 22 + }, + { + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 23 + }, + { + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 24 + }, + { + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 25 + }, + { + "subtask": "Grasp the black marker and put it in the left basket", + "subtask_index": 26 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 27 + }, + { + "subtask": "Grasp the black glass cup and put it in the left basket", + "subtask_index": 28 + }, + { + "subtask": "Grasp the brush and put it in 
the left basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the blue towel and put it in the left basket", + "subtask_index": 31 + }, + { + "subtask": "Grasp the tea cup and put it in the left basket", + "subtask_index": 32 + }, + { + "subtask": "Grasp the peeler and put it in the left basket", + "subtask_index": 33 + }, + { + "subtask": "Grasp the brown towel and put it in the left basket", + "subtask_index": 34 + }, + { + "subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 35 + }, + { + "subtask": "Abnormal", + "subtask_index": 36 + }, + { + "subtask": "Grasp the chocolate and put it in the right basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the grey towel and put it in the left basket", + "subtask_index": 38 + }, + { + "subtask": "Grasp the canned cola and put it in the right basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 40 + }, + { + "subtask": "Grasp the bread slice and put it in the right basket", + "subtask_index": 41 + }, + { + "subtask": "Grasp the tin and put it in the right basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the soap and put it in the right basket", + "subtask_index": 43 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 44 + }, + { + "subtask": "Grasp the yellow duck and put it in the right basket", + "subtask_index": 45 + }, + { + "subtask": "Grasp the peach doll and put it in the right basket", + "subtask_index": 46 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 47 + }, + { + "subtask": "Grasp the pen container and put it in the left basket", + "subtask_index": 48 + }, + { + "subtask": "Grasp the red duck and put it in the left basket", + "subtask_index": 49 + }, + { + "subtask": "Grasp the lime and put it in the right 
basket", + "subtask_index": 50 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 51 + }, + { + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 52 + }, + { + "subtask": "Grasp the potato chips and put it in the right basket", + "subtask_index": 53 + }, + { + "subtask": "Grasp the can and put it in the right basket", + "subtask_index": 54 + }, + { + "subtask": "Grasp the ad milk and put it in the right basket", + "subtask_index": 55 + }, + { + "subtask": "Grasp the blue marker and put it in the left basket", + "subtask_index": 56 + }, + { + "subtask": "null", + "subtask_index": 57 } ], "atomic_actions": [ @@ -35291,25 +33562,27 @@ "place" ], "robot_name": [ - "Realman_RMC-AIDA-L" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + 
"end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -35320,30 +33593,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 19083, + "total_episodes": 195, + "total_frames": 160875, "fps": 30, - "total_tasks": 5, - "total_videos": 150, + "total_tasks": 58, + "total_videos": 780, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 28, - "action_dim": 28, - "camera_views": 3, - "dataset_size": "201.40 MB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "8.91 GB" }, - "frame_num": 19083, - "dataset_size": "201.40 MB", - "data_structure": "Realman_RMC-AIDA-L_storage_block_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 160875, + "dataset_size": "8.91 GB", + "data_structure": "Galaxea_R1_Lite_classify_object_five_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(183 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:194" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -35352,8 +33625,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -35365,8 +33661,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -35375,8 +33671,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -35388,8 +33684,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -35398,8 +33694,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -35411,73 +33707,45 @@ "observation.state": { "dtype": "float32", "shape": [ - 28 + 14 ], "names": [ - 
"right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_arm_joint_7_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 28 + 14 ], "names": [ - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_arm_joint_7_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" + "right_gripper_open" ] }, "timestamp": { @@ -35709,118 +33977,9 @@ "version_info": "Initial Release", "data_path": 
"data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "RMC-AIDA-L_plate_storage": { - "path": "RMC-AIDA-L_plate_storage", - "dataset_name": "plate_storage", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the plate on the shelf with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "shelf", - "level1": "furniture", - "level2": "shelf", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-284602", - "dataset_size": "1.8GB", - "statistics": { - "total_episodes": 498, - "total_frames": 284602, - "total_tasks": 2, - "total_videos": 1494, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "8c203fd8-2988-4e8f-8ddc-eedbb69dcdfe", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the plate on the shelf with the right gripper", - "End", - "Pick up the plate with the right gripper", - "Pick up the plate with the left gripper", - "Pass the plate from the left gripper to the right gripper", - "Pass the plate from the right gripper to the left gripper", - "Place the plate on the shelf with the left gripper", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - 
"gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_mix_red_blue_right": { + "Airbot_MMK2_move_pan": { "task_categories": [ "robotics" ], @@ -35850,11 +34009,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_red_blue_right", + "dataset_name": "Airbot_MMK2_move_pan", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -35862,142 +34021,74 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", + "object_name": "pan", + "level1": "cookware", + "level2": "pan", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null - }, + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "push the pan to the edge of the table with your left hand and place it on the building block with your right hand." 
+ ], + "sub_tasks": [ { - "object_name": "yellow_pigment", - "level1": "materials", - "level2": "yellow_pigment", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the frying pan with the right gripper", + "subtask_index": 0 }, { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tubes", - "level1": "laboratory_supplies", - "level2": "test_tubes", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "pick up the test tube with red pigment and the test tube with blue pigment by grippers and pour them into the container." - ], - "sub_tasks": [ - { - "subtask": "Pour the red reagent into the graduated cylinder with right gripper", - "subtask_index": 0 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "Pick up the test tube containing the red reagent with right gripper", + "subtask": "Place the frying pan on the red cube block with the right gripper", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Push the frying pan on left to right with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the test tube into the bowl with right gripper", + "subtask": "End", "subtask_index": 4 }, - { - "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "Pick up the test tube containing the blue reagent with right gripper", - "subtask_index": 6 - }, - { - "subtask": "Pour the blue reagent into the graduated cylinder with right gripper", 
- "subtask_index": 7 - }, - { - "subtask": "Grasp the blue reagent with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the yellow reagent with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "end", - "subtask_index": 11 - }, - { - "subtask": "Place the test tube into the bowl with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the red reagent with the right gripper", - "subtask_index": 13 - }, { "subtask": "null", - "subtask_index": 14 + "subtask_index": 5 } ], "atomic_actions": [ + "push", "grasp", "pick", - "place", - "pour" + "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, 
pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -36014,30 +34105,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 32072, + "total_episodes": 48, + "total_frames": 14881, "fps": 30, - "total_tasks": 15, - "total_videos": 200, + "total_tasks": 6, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "647.52 MB" + "dataset_size": "764.06 MB" }, - "frame_num": 32072, - "dataset_size": "647.52 MB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 14881, + "dataset_size": "764.06 MB", + "data_structure": "Airbot_MMK2_move_pan_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:47" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -36046,8 +34137,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -36056,11 +34147,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -36069,8 +34160,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -36079,10 +34170,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -36092,7 +34183,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -36102,10 +34193,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -36115,7 +34206,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -36128,7 +34219,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ 
"left_arm_joint_1_rad", @@ -36143,14 +34234,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -36165,8 +34278,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -36317,66 +34452,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -36398,9 +34473,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_cup_rubik's_cube": { + "Airbot_MMK2_push_toy_car": { "task_categories": [ "robotics" ], @@ -36430,7 +34505,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_cup_rubik's_cube", + "dataset_name": "Airbot_MMK2_push_toy_car", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -36442,25 +34517,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cup", - "level1": "kitchen_supplies", - "level2": "cup", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": 
"rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", + "object_name": "toy_car", + "level1": "toy", + "level2": "toy_car", "level3": null, "level4": null, "level5": null @@ -36468,42 +34535,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the rubik's Cube into the plate with right hand and place the cup on top of the cube with left hand." + "push the toy car by hand." ], "sub_tasks": [ { - "subtask": "Grasp the paper cup with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Place the paper cup on the magic cube with the left gripper", + "subtask": "Static", "subtask_index": 1 }, { - "subtask": "Place the magic cube on the plate with the right gripper", + "subtask": "Push the toy car from left to right with the left gripper", "subtask_index": 2 }, { "subtask": "Abnormal", "subtask_index": 3 }, - { - "subtask": "Grasp the magic cube with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "End", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 4 } ], "atomic_actions": [ - "grasp", - "pick", - "place" + "push" ], "robot_name": [ "Airbot_MMK2" @@ -36537,23 +34594,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 13787, + "total_episodes": 50, + "total_frames": 6897, "fps": 30, - "total_tasks": 7, - "total_videos": 188, + "total_tasks": 5, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "387.14 MB" + "dataset_size": "175.81 MB" }, - "frame_num": 13787, - "dataset_size": "387.14 MB", - "data_structure": "Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 6897, + "dataset_size": "175.81 MB", + "data_structure": "Airbot_MMK2_push_toy_car_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:46" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -36907,7 +34964,102 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_spoon": { + "leju_robot_pass_the_cleaner_d": { + "path": "leju_robot_pass_the_cleaner_d", + "dataset_name": "pass_the_cleaner_d", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Turn the bottle to the front side.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleaner", + "level1": "daily_necessities", + "level2": "cleaner", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-168676", + "dataset_size": "8.7GB", + "statistics": { + "total_episodes": 451, + "total_frames": 168676, + "total_tasks": 1, + "total_videos": 1353, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "d1843bc1-83c4-4c84-8c34-c6740a7e82a9", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Turn the bottle to the front side.", + "Pick up the bottle from the table.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + 
"gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_pass_the_cleaner_d_qced_hardlink/\n├── 
annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_pass_the_cleaner_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_storage_object_gray_plate": { "task_categories": [ "robotics" ], @@ -36937,11 +35089,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_spoon", + "dataset_name": "Galaxea_R1_Lite_storage_object_gray_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -36949,17 +35101,217 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "spoon", + "object_name": "gray_plate", "level1": "kitchen_supplies", - "level2": "spoon", + "level2": "gray_plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "basin", - "level1": "home_storage", - "level2": "basin", + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "kitchen_supplies", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke", + "level1": "beverages", + "level2": "coke", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "toys", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "kitchen_supplies", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "stationery", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": 
"round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null @@ -36967,32 +35319,376 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the spoons on the table by hand and place them into the basin." + "use a gripper to pick the target object and place on the gray plate." 
], "sub_tasks": [ { - "subtask": "Grasp the spoon with the right gripper", + "subtask": "Place the tape on the gray plate with the left gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the blue pot with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the spoon with the left gripper", + "subtask": "Grasp the plugboard with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the spoon into the basin with the right gripper", + "subtask": "Grasp the potato chips with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the spoon into the basin with the left gripper", + "subtask": "Grasp the banana with the left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the round wooden block on the gray plate with the left gripper", "subtask_index": 5 + }, + { + "subtask": "Place the peach on the gray plate with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the square chewing gum on the gray plate with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the compasses on the gray plate with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the potato chips on the gray plate with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the green lemon on the gray plate with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the square wooden block on the gray plate with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the soft facial cleanser with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the blue cup on the gray plate with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Grasp 
the square chewing gum with the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Place the blackboard erasure on the gray plate with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the plugboard with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the yogurt with the right gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the round bread on the gray plate with the right gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Place the shower sphere on the gray plate with the left gripper", + "subtask_index": 26 + }, + { + "subtask": "Grasp the chocolate with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Place the soft facial cleanser on the gray plate with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Grasp the peach with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 30 + }, + { + "subtask": "Place the shower sphere on the gray plate with the right gripper", + "subtask_index": 31 + }, + { + "subtask": "Grasp the back scratcher with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the square wooden block on the gray plate with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the blackboard erasure on the gray plate with the left gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the yogurt on the gray plate with the right gripper", + "subtask_index": 35 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 36 + }, + { + 
"subtask": "Place the coke on the gray plate with the left gripper", + "subtask_index": 37 + }, + { + "subtask": "Place the chocolate cake on the gray plate with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "Place the banana on the gray plate with the left gripper", + "subtask_index": 39 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the bread slice with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "Place the bread slice on the gray plate with the right gripper", + "subtask_index": 42 + }, + { + "subtask": "Place the brown towel on the gray plate with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the potato chips with the left gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 45 + }, + { + "subtask": "End", + "subtask_index": 46 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 48 + }, + { + "subtask": "Place the plugboard on the gray plate with the right gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the round bread on the gray plate with the left gripper", + "subtask_index": 50 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the round wooden block on the gray plate with the right gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the blue pot on the gray plate with the left gripper", + "subtask_index": 54 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 56 + }, + { + "subtask": "Grasp the chocolate cake with the left 
gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the brown towel on the gray plate with the left gripper", + "subtask_index": 58 + }, + { + "subtask": "Place the blue pot on the gray plate with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Place the compasses on the gray plate with the left gripper", + "subtask_index": 60 + }, + { + "subtask": "Place the coke on the gray plate with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the tin with the right gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the square chewing gum on the gray plate with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the back scratcher on the gray plate with the left gripper", + "subtask_index": 64 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 65 + }, + { + "subtask": "Place the tin on the gray plate with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Grasp the coke with the right gripper", + "subtask_index": 67 + }, + { + "subtask": "Grasp the tape with the left gripper", + "subtask_index": 68 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the tape on the gray plate with the right gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the hard facial cleanser on the gray plate with the left gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 72 + }, + { + "subtask": "Place the duck toy on the gray plate with the right gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the bread slice with the left gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the chocolate on the gray plate with the right gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the blackboard erasure with the right gripper", + "subtask_index": 76 + }, + { + "subtask": "Place the banana on the gray 
plate with the right gripper", + "subtask_index": 77 + }, + { + "subtask": "Place the peach on the gray plate with the right gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 79 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 81 + }, + { + "subtask": "Place the chocolate cake on the gray plate with the left gripper", + "subtask_index": 82 + }, + { + "subtask": "Place the duck toy on the gray plate with the left gripper", + "subtask_index": 83 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 84 + }, + { + "subtask": "Place the plugboard on the gray plate with the left gripper", + "subtask_index": 85 + }, + { + "subtask": "Place the bread slice on the gray plate with the left gripper", + "subtask_index": 86 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 87 + }, + { + "subtask": "Place the potato chips on the gray plate with the left gripper", + "subtask_index": 88 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 89 + }, + { + "subtask": "Place the blue cup on the gray plate with the right gripper", + "subtask_index": 90 + }, + { + "subtask": "null", + "subtask_index": 91 } ], "atomic_actions": [ @@ -37001,21 +35697,21 @@ "place" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -37032,30 +35728,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 54, - "total_frames": 15550, + "total_episodes": 102, + "total_frames": 20357, "fps": 30, - "total_tasks": 6, - "total_videos": 216, + "total_tasks": 92, + "total_videos": 408, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "509.63 MB" + "dataset_size": "786.26 MB" }, - "frame_num": 15550, - "dataset_size": "509.63 MB", - "data_structure": "Airbot_MMK2_storage_spoon_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- 
episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 20357, + "dataset_size": "786.26 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_gray_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:53" + "train": "0:101" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -37064,8 +35760,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -37074,11 +35770,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -37087,8 +35783,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -37097,11 +35793,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -37110,8 +35806,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -37120,11 +35816,11 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -37133,8 +35829,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + 
"video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -37146,7 +35842,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -37161,36 +35857,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -37205,30 +35879,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -37379,6 +36031,66 @@ 2 ], 
"dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -37400,112 +36112,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "G1edu-u3_tray_storage_lemon_a": { - "path": "G1edu-u3_tray_storage_lemon_a", - "dataset_name": "tray_storage_lemon_a", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick", - "place" - ], - "tasks": "Place the lemon on the blue plate with left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tray", - "level1": "container", - "level2": "tray", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lemon", - "level1": "fruits", - "level2": "lemon", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - 
"frame_range": "0-2298", - "dataset_size": "27.2MB", - "statistics": { - "total_episodes": 13, - "total_frames": 2298, - "total_tasks": 1, - "total_videos": 13, - "total_chunks": 1, - "chunks_size": 13, - "fps": 30 - }, - "dataset_uuid": "50af80c8-eb74-4dec-9689-8bea0af5a8c2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the lemon on the blue plate with left gripper", - "Place the lemon on the blue plate with right gripper", - "End", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, 
Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_tray_storage_lemon_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_tray_storage_lemon_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── 
videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "agilex_cobot_magic_pass_object_right_to_left_black_tablecloth": { + "Airbot_MMK2_move_fake_food": { "task_categories": [ "robotics" ], @@ -37535,11 +36144,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_black_tablecloth", + "dataset_name": "Airbot_MMK2_move_fake_food", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -37547,311 +36156,67 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "early_education_toys", + "level1": "toys", + "level2": "early_education_toys", "level3": null, "level4": null, "level5": null - }, + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the toy food on the toy table with your right hand." 
+ ], + "sub_tasks": [ { - "object_name": "ambrosial_yogurt", - "level1": "food", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null + "subtask": "End", + "subtask_index": 0 }, { - "object_name": "banana", - "level1": "food", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the small bowl of canned food with right gripper", + "subtask_index": 1 }, { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the small bowl of canned food on the table with the right gripper", + "subtask_index": 2 }, { - "object_name": "milk", - "level1": "food", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - "level1": "food", - "level2": "grape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eyeglass_case", - "level1": "laboratory_supplies", - "level2": "eyeglass_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cleanser", - "level1": "daily_necessities", - 
"level2": "cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "black_table_cloths", - "level1": "laboratory_supplies", - "level2": "black_table_cloths", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." - ], - "sub_tasks": [ - { - "subtask": "The left gripper places milk on the left side of the table", - "subtask_index": 0 - }, - { - "subtask": "The left gripper places bread on the left side of the table", - "subtask_index": 1 - }, - { - "subtask": "Pass the bread to the left gripper", - "subtask_index": 2 - }, - { - "subtask": "Pass the shower sphere to the left gripper", - "subtask_index": 3 - }, - { - "subtask": "Use the right gripper to grab the bread on the right side of the table", - "subtask_index": 4 - }, - { - "subtask": "Use the right gripper to grab the grape on the right side of the table", - "subtask_index": 5 - }, - { - "subtask": "End", - "subtask_index": 6 - }, - { - "subtask": "Use the right gripper to grab the banana on the right side of the table", - "subtask_index": 7 - }, - { - "subtask": "Pass the purple garbage bag to the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Pass the Rubik's Cube to the left gripper", - "subtask_index": 9 - }, - { - "subtask": "The left gripper 
places yogurt on the left side of the table", - "subtask_index": 10 - }, - { - "subtask": "The left gripper places shower sphere on the left side of the table", - "subtask_index": 11 - }, - { - "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", - "subtask_index": 12 - }, - { - "subtask": "Use the right gripper to grab the grapes on the right side of the table", - "subtask_index": 13 - }, - { - "subtask": "Pass the yogurt to the left gripper", - "subtask_index": 14 - }, - { - "subtask": "The left gripper places grape on the left side of the table", - "subtask_index": 15 - }, - { - "subtask": "The left gripper places grapes on the left side of the table", - "subtask_index": 16 - }, - { - "subtask": "Use the right gripper to grab the milk on the right side of the table", - "subtask_index": 17 - }, - { - "subtask": "Pass the milk to the left gripper", - "subtask_index": 18 - }, - { - "subtask": "The left gripper places banana on the left side of the table", - "subtask_index": 19 - }, - { - "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", - "subtask_index": 20 - }, - { - "subtask": "Pass the banana to the left gripper", - "subtask_index": 21 - }, - { - "subtask": "Pass the grapes to the left gripper", - "subtask_index": 22 - }, - { - "subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", - "subtask_index": 23 - }, - { - "subtask": "Pass the grape to the left gripper", - "subtask_index": 24 - }, - { - "subtask": "The left gripper places eyeglass case on the left side of the table", - "subtask_index": 25 - }, - { - "subtask": "Pass the eyeglass case to the left gripper", - "subtask_index": 26 - }, - { - "subtask": "Use the right gripper to grab the yogurt on the right side of the table", - "subtask_index": 27 - }, - { - "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", - "subtask_index": 28 - }, - { - 
"subtask": "\nPass the milk to the left gripper\n", - "subtask_index": 29 - }, - { - "subtask": "The left gripper places purple garbage bag on the left side of the table", - "subtask_index": 30 + "subtask": "Abnormal", + "subtask_index": 3 }, { "subtask": "null", - "subtask_index": 31 + "subtask_index": 4 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "handover", - "takeover" + "pick", + "place" ], "robot_name": [ - "agilex_cobot_magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -37862,23 +36227,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 93, - "total_frames": 59541, + "total_episodes": 50, + "total_frames": 7610, "fps": 30, - "total_tasks": 32, - "total_videos": 279, + "total_tasks": 5, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - 
"action_dim": 26, - "camera_views": 3, - "dataset_size": "1.15 GB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "313.62 MB" }, - "frame_num": 59541, - "dataset_size": "1.15 GB", - "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 7610, + "dataset_size": "313.62 MB", + "data_structure": "Airbot_MMK2_move_fake_food_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| 
|-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:92" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -37950,10 +36315,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -37962,32 +36350,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + 
"right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -37996,26 +36394,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -38055,17 +36463,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -38082,10 +36490,10 @@ "right_eef_rot_y", 
"right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -38102,130 +36510,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": 
"int32" } }, "authors": { @@ -38247,14 +36595,14 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_clean_the_desktop": { - "path": "AIRBOT_MMK2_clean_the_desktop", - "dataset_name": "clean_the_desktop", + "Galbot_g1_steamer_storage_baozi_h": { + "path": "Galbot_g1_steamer_storage_baozi_h", + "dataset_name": "steamer_storage_baozi_h", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -38273,27 +36621,51 @@ "level5": null }, { - "object_name": "calculator_box", - "level1": "tools", - "level2": "umbrella", + "object_name": "baozi", + "level1": "food", + "level2": "baozi", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "steamer", + "level1": "cookware", + "level2": "steamer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pot_lid", + "level1": "daily_necessities", + "level2": "pot_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-8709", - "dataset_size": "327.5MB", + "operation_platform_height": null, + "frame_range": "0-979757", + "dataset_size": "18.2GB", "statistics": { - "total_episodes": 49, - "total_frames": 8709, + "total_episodes": 996, + "total_frames": 979757, "total_tasks": 1, - "total_videos": 196, + "total_videos": 2988, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "05f361bc-b167-4623-b38b-a9371aa7d999", + "dataset_uuid": "23188104-2751-4169-b13b-286381a44645", 
"language": [ "en", "zh" @@ -38303,10 +36675,10 @@ ], "sub_tasks": [ "End", - "Grasp the tissue with the right gripper", - "Place the umbrella on the white lid with the left gripper", - "Place the tissue on the white lid with the right gripper", - "Grasp the umbrella with the left gripper", + "Place the baozi on the steamer with right gripper", + "Place the pot lid on the steamer with left gripper", + "Grasp the pot lid with left gripper", + "Grasp the baozi in the plate with right gripper", "null" ], "annotations": { @@ -38344,10 +36716,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_clean_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_clean_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Galbot_g1_steamer_storage_baozi_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_steamer_storage_baozi_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_bottle_umbrella": { + "Airbot_MMK2_pull_tissue": { "task_categories": [ "robotics" ], @@ -38377,7 +36749,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_bottle_umbrella", + "dataset_name": "Airbot_MMK2_pull_tissue", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -38389,25 +36761,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "empty_bottle", - "level1": "mineral_water", - "level2": "empty_bottle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "umbrella", - "level1": "daily_necessities", - "level2": "umbrella", + "object_name": "tissue_paper", + "level1": "kitchen_supplies", + "level2": "paper_towels", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "laboratory_supplies", - "level2": "lid", + "object_name": "tissue_box", + "level1": 
"kitchen_supplies", + "level2": "paper_towels", "level3": null, "level4": null, "level5": null @@ -38415,11 +36779,11 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the umbrella off the lid with right hand, and then remove the bottle from the lid with left hand." + "take a tissue paper out of tissue box by hand." ], "sub_tasks": [ { - "subtask": " Grasp the umbrella placed on the white lid with the left gripper", + "subtask": "Place the tissue on the table with the right gripper", "subtask_index": 0 }, { @@ -38427,15 +36791,15 @@ "subtask_index": 1 }, { - "subtask": "Grasp the Yibao placed on the white lid with the left gripper", + "subtask": "Pull out a piece of tissue with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the umbrella on the table with the right gripper", + "subtask": "Abnormal", "subtask_index": 3 }, { - "subtask": "Place the Yibao on the table with the left gripper", + "subtask": "Grasp the a piece of tissue with the right gripper", "subtask_index": 4 }, { @@ -38480,23 +36844,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 54, - "total_frames": 18663, + "total_episodes": 49, + "total_frames": 11531, "fps": 30, "total_tasks": 6, - "total_videos": 216, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "759.87 MB" + "dataset_size": "405.79 MB" }, - "frame_num": 18663, - "dataset_size": "759.87 MB", - "data_structure": "Airbot_MMK2_take_bottle_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| 
|-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 11531, + "dataset_size": "405.79 MB", + "data_structure": "Airbot_MMK2_pull_tissue_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:53" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -38850,539 +37214,652 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_bottle_part": { + "leju_robot_pass_the_cleaner_b": { + "path": "leju_robot_pass_the_cleaner_b", + "dataset_name": "pass_the_cleaner_b", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Turn the bottle to the front side.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleaner", + "level1": "daily_necessities", + "level2": "cleaner", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-164222", + "dataset_size": "8.5GB", + "statistics": { + "total_episodes": 441, + "total_frames": 164222, + "total_tasks": 1, + "total_videos": 1323, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "ceb2e30d-5b33-4f0f-b9e0-a88ee69d8e21", + "language": [ + "en", + "zh" + ], "task_categories": [ "robotics" ], - "language": [ - "en" + "sub_tasks": [ + "Turn the bottle to the front side.", + "Pick up the bottle from the table.", + "null" ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": 
"auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_pass_the_cleaner_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_pass_the_cleaner_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_place_the_test_tube": { + "path": "Cobot_Magic_place_the_test_tube", + "dataset_name": "place_the_test_tube", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_bottle_part", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the test tubes on the test tube rack with the right gripper", "objects": [ { - "object_name": "water_bottle", - "level1": "beverages", - "level2": "water_bottle", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "handle", - "level1": 
"laboratory_supplies", - "level2": "handle", + "object_name": "test_tube", + "level1": "tool", + "level2": "test_tube", "level3": null, "level4": null, "level5": null }, { - "object_name": "carton", - "level1": "laboratory_supplies", - "level2": "carton", + "object_name": "test_tube_rack", + "level1": "tool", + "level2": "test_tube_rack", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "put the water bottle and handle into the cardboard box." - ], - "sub_tasks": [ - { - "subtask": "Grasp the water bottle with the right gripper", - "subtask_index": 0 - }, - { - "subtask": "Place the water bottle into the cardboard box with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Lift the water bottle with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "Lift the remote control clip with the left gripper ", - "subtask_index": 3 }, { - "subtask": "Abnormal", - "subtask_index": 4 - }, - { - "subtask": "Static", - "subtask_index": 5 - }, - { - "subtask": "Place the remote control clip into the cardboard box with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "End", - "subtask_index": 7 - }, - { - "subtask": "Grasp the remote control clip with the left gripper ", - "subtask_index": 8 - }, - { - "subtask": "null", - "subtask_index": 9 + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "grasp", - "pick", - "place" + "operation_platform_height": 77.2, + "frame_range": "0-308346", + "dataset_size": "6.0GB", + "statistics": { + "total_episodes": 364, + "total_frames": 308346, + "total_tasks": 6, + "total_videos": 1092, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 50 + }, + "dataset_uuid": "070ad40e-7f20-4d3e-82d6-9cb8c0fb1f74", + "language": [ + "en", + "zh" ], - 
"robot_name": [ - "Airbot_MMK2" + "task_categories": [ + "robotics" ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "sub_tasks": [ + "Place the test tubes on the test tube rack with the right gripper", + "Lift the test tube with the left hand.", + "End", + "Pass the test tube from the left gripper to the right gripper", + "Pick up the test tubes from the table", + "Place the test tubes on the rack", + "Hand over the test tubes", + "Grab the test tube with the left hand.", + "Receive the test tube with the right hand.", + "Pick up the test tube with the left gripper", + "Insert it into the hole at the intersection of the 5th row from the top and the 5th column from the left with the right hand.", + "abnormal", + "null" ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] }, + "homepage": 
"https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" + "data_schema": 
"Cobot_Magic_place_the_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_place_the_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galbot_g1_fold_clothe_c": { + "path": "Galbot_g1_fold_clothe_c", + "dataset_name": "fold_clothe_c", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "flod" + ], + "tasks": "Abnormal", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "clothes", + "level1": "fabric", + "level2": "clothes", + "level3": null, + "level4": null, + "level5": null + } ], + "operation_platform_height": null, + "frame_range": "0-516810", + "dataset_size": "22.3GB", "statistics": { - "total_episodes": 49, - "total_frames": 10499, - "fps": 30, - "total_tasks": 10, - "total_videos": 196, + "total_episodes": 666, + "total_frames": 516810, + "total_tasks": 1, + "total_videos": 1998, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "508.16 MB" + "fps": 30 }, - "frame_num": 10499, - "dataset_size": "508.16 MB", - "data_structure": "Airbot_MMK2_storage_bottle_part_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- 
episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:48" + "dataset_uuid": "9c9c15eb-08d9-4119-b551-ce2439643f0f", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", + "use the left gripper to clamp the left edge of the fabric", + "Drag the clothes to the center of the table", + "End", + "use both grippers simultaneously to clamp the upper edge of the clothing fabric", + "use both grippers to drag the lower edge of the fabric forward and fold it over the upper edge", + "use both grippers to drag the upper edge of the fabric backward and fold it over the lower edge", + "use both grippers simultaneously to clamp the lower edge of the clothing fabric", + "Drag the clothes downward with both gripper", + "use the left gripper to drag the left edge of the fabric to the left and folds it over the right edge", + "Flip the folded clothes over with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", 
- "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Galbot_g1_fold_clothe_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_fold_clothe_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_open_the_food_pan": { + "path": "R1_Lite_open_the_food_pan", + "dataset_name": "open_the_food_pan", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Hold the pot lid with the right arm", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + { + "object_name": "pan", + "level1": "container", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-74981", + "dataset_size": "2.6GB", + "statistics": { + "total_episodes": 94, + "total_frames": 74981, + "total_tasks": 1, + "total_videos": 282, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "41f586d5-51f7-456a-b30f-ca5a7e7555c2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ 
+ "Hold the pot lid with the right arm", + "Close the pot lid", + "Open the pot lid completely", + "Hold the pot lid with the left arm", + "Open the food pot lid a small opening with the right arm", + "Open the food pot lid a small opening with the left arm", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, 
Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_open_the_food_pan_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_open_the_food_pan_qced_hardlink/\n├── 
annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_pass_the_cleaner_e": { + "path": "leju_robot_pass_the_cleaner_e", + "dataset_name": "pass_the_cleaner_e", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Turn the bottle to the front side.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - 
"left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - 
"shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cleaner", + "level1": "daily_necessities", + "level2": "cleaner", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-165558", + "dataset_size": "8.6GB", + "statistics": { + "total_episodes": 430, + "total_frames": 165558, + "total_tasks": 1, + "total_videos": 1290, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "5857e16d-9974-4991-a27c-549f3ba35f03", + 
"language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Turn the bottle to the front side.", + "Pick up the bottle from the table.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, 
Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_pass_the_cleaner_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_pass_the_cleaner_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_basket_storage_orange": { + "path": "RMC-AIDA-L_basket_storage_orange", + "dataset_name": "basket_storage_orange", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the dark basket in the center of view with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-112423", + "dataset_size": "1.1GB", + "statistics": { + "total_episodes": 356, + "total_frames": 112423, + "total_tasks": 4, + "total_videos": 1068, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + 
"dataset_uuid": "f96c79dc-15ee-43ed-8c21-1e898a816ee6", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the dark basket in the center of view with the right gripper", + "Place the light basket in the center of view with the right gripper", + "Place the orange into the dark basket with the left gripper", + "Place the dark basket in the center of view with the left gripper", + "end", + "Place the orange into the light basket with the left gripper", + "Pick up the light basket with the right gripper", + "Place the orange into the light basket with the right gripper", + "Pick up the orange with the right gripper", + "Place the light basket in the center of view with the left gripper", + "Pick up the dark basket with the left gripper", + "Pick up the light basket with the left gripper", + "Pick up the dark basket with the right gripper", + "Pick up the orange with the left gripper", + "Place the orange into the dark basket with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback 
regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n 
title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_mobile_calculator_box": { - "path": "AIRBOT_MMK2_mobile_calculator_box", - "dataset_name": "mobile_calculator_box", + "Split_aloha_scoop_coffee_beans": { + "path": "Split_aloha_scoop_coffee_beans", + "dataset_name": "scoop_coffee_beans", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], 
"atomic_actions": [ + "scoop", "grasp", - "place", - "pick" + "pick", + "place" ], - "tasks": "Grasp the calculator case with left gripper", + "tasks": "Place the spoon back in its original place", "objects": [ { "object_name": "table", @@ -39393,35 +37870,51 @@ "level5": null }, { - "object_name": "calculator_box", + "object_name": "cup", "level1": "container", - "level2": "calculator_box", + "level2": "cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", + "object_name": "cup_lid", "level1": "container", - "level2": "lid", + "level2": "cup_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee_beans", + "level1": "food", + "level2": "coffee_beans", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-34396", - "dataset_size": "1.1GB", + "frame_range": "0-409440", + "dataset_size": "3.8GB", "statistics": { - "total_episodes": 150, - "total_frames": 34396, - "total_tasks": 2, - "total_videos": 600, + "total_episodes": 499, + "total_frames": 409440, + "total_tasks": 8, + "total_videos": 1497, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "7629d634-2d89-41ad-a722-364a319f374b", + "dataset_uuid": "643f286d-7021-4fb1-8c63-aecd24fb763e", "language": [ "en", "zh" @@ -39430,16 +37923,125 @@ "robotics" ], "sub_tasks": [ - "Grasp the calculator case with left gripper", + "Place the spoon back in its original place", + "Move the cup to the center of view", + "Pick up the spoon", + "Scoop a spoonful of beans into the cup", + "abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + 
"gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Split_aloha_scoop_coffee_beans_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_scoop_coffee_beans_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ 
├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_put_on_a_garbage_bag": { + "path": "R1_Lite_put_on_a_garbage_bag", + "dataset_name": "put_on_a_garbage_bag", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Abnormal", + "objects": [ + { + "object_name": "trash_bag", + "level1": "container", + "level2": "trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "trash_can", + "level1": "container", + "level2": "trash_can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "trash_can_ring", + "level1": "daily_necessities", + "level2": "trash_can_ring", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-252689", + "dataset_size": "11.0GB", + "statistics": { + "total_episodes": 133, + "total_frames": 252689, + "total_tasks": 1, + "total_videos": 399, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c689a888-c8c9-4a05-a250-680b40218803", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", "End", - "Place the calculator case in the center of table with right gripper", - "Place the white box in the center of table with right gripper", - "Pass the white box to right gripper", - "Grasp the yellow book with left gripper", - "Place the yellow book in the center of table with right gripper", - "Pass the yellow book to right gripper", - "Pass the calculator case to right gripper", - "Grasp the white box with left gripper", + "Grasp a new trash bag with the right gripper", + "put the new garbage bag on the trash can", + "Place the trash can 
ring on the trash can with the right gripper", + "Pick up the trash bag", + "Grasp the trash bag with the right gripper", + "place the trash can ring on the floor with right gripper", + "Place the trash bag on the floor with the right gripper", + "Place the trash can ring on the floor", + "Pick up a new trash bag", + "Place the trash can ring on the trash can", + "Place the trash bag on the floor", "null" ], "annotations": { @@ -39477,10 +38079,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_mobile_calculator_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_mobile_calculator_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n 
│ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_turn_page": { + "Airbot_MMK2_storage_and_take_cake_plate": { "task_categories": [ "robotics" ], @@ -39510,11 +38112,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_turn_page", + "dataset_name": "Airbot_MMK2_storage_and_take_cake_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "study_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -39522,9 +38124,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "book", - "level1": "stationery", - "level2": "books", + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -39532,28 +38142,42 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "turn a page of 
the english book on the table with right hand." + "put the cake into the plate with left hand and take it out with right hand." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Place the cake on the table with the right gripper", "subtask_index": 0 }, { - "subtask": "Turn the book to the next page with the right gripper", + "subtask": "Grasp the cake with the left gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Static", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Place the cake into the white plate with the left gripper", "subtask_index": 3 + }, + { + "subtask": "Grasp the cake on the plate with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ - "flip" + "grasp", + "pick", + "place" ], "robot_name": [ "Airbot_MMK2" @@ -39587,23 +38211,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 149, - "total_frames": 19581, + "total_episodes": 50, + "total_frames": 9782, "fps": 30, - "total_tasks": 4, - "total_videos": 596, + "total_tasks": 7, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "740.88 MB" + "dataset_size": "487.34 MB" }, - "frame_num": 19581, - "dataset_size": "740.88 MB", - "data_structure": "Airbot_MMK2_turn_page_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- 
episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (137 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 9782, + "dataset_size": "487.34 MB", + "data_structure": "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:148" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -39957,157 +38581,350 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "leju_robot_hotel_services_f": { + "path": "leju_robot_hotel_services_f", + "dataset_name": "hotel_services_f", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "office & workspace", - "level2": "office", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Hand the room key to the person.", "objects": [ { "object_name": "table", - "level1": "home_storage", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "deli_water-based_marker", - "level1": "stationery", - "level2": "deli_water-based_marker", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "notebook", - "level1": "stationery", - "level2": "notebook", + "object_name": "card", + "level1": "nfc", + "level2": "card", "level3": null, "level4": null, "level5": null }, { - "object_name": "mouse", - "level1": "appliances", - "level2": "mouse", + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": null, + "frame_range": "0-95863", + "dataset_size": "6.3GB", + "statistics": { + "total_episodes": 135, + "total_frames": 95863, + "total_tasks": 1, + "total_videos": 405, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "9c2a5a4c-c510-4e80-983e-593589a8eec6", + "language": [ + "en", + "zh" + ], + 
"task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Hand the room key to the person.", + "Place the ID card on the card reader.", + "Take the ID card from the person's hand.", + "Pick up the room key from the key card box.", + "Hand the ID card to the person.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, 
Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_hotel_services_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_pullBowl_storage_bread_unordered_C": { + "path": "G1edu-u3_pullBowl_storage_bread_unordered_C", + "dataset_name": "pullBowl_storage_bread_unordered_C", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the long bread in pink bowl with left hand", + "objects": [ { - "object_name": "mouse_pad", - "level1": "appliances", - "level2": "mouse_pad", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "red_table_cloths", - "level1": "laboratory_supplies", - "level2": "red_table_cloths", + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." 
- ], - "sub_tasks": [ - { - "subtask": "Right gripper", - "subtask_index": 0 }, { - "subtask": "Pick up the mouse with right gripper", - "subtask_index": 1 + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the marker pen with right gripper", - "subtask_index": 2 + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the mouse with left gripper", - "subtask_index": 3 + "object_name": "donut", + "level1": "food", + "level2": "donut", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "End", - "subtask_index": 4 + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-110085", + "dataset_size": "2.2GB", + "statistics": { + "total_episodes": 190, + "total_frames": 110085, + "total_tasks": 1, + "total_videos": 570, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a51", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the long bread in pink bowl with left hand", + "End", + "Grasp the round bread with left hand", + "Grasp the long bread with left hand", + "Place the round bread in pink bowl with left hand", + "Move the pink bowl to the center of table with right hand", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + 
], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_C_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ 
└── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pullBowl_storage_bread_unordered_C_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── 
episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Realman_RMC-AIDA-L_storage_block_basket": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Realman_RMC-AIDA-L_storage_block_basket", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "subtask": "Place the marker pen on the notebook with right gripper", - "subtask_index": 5 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the mouse on the mouse pad with left gripper", - "subtask_index": 6 + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the marker pen on the notebook with left gripper", - "subtask_index": 7 + "object_name": "building_block", + "level1": "toy", + "level2": "building_block", + "level3": null, + "level4": null, + "level5": null + } + ], + 
"task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the left gripper grasp the basket on the table, the right grippe pick up the blocks on the table and place it into the basket." + ], + "sub_tasks": [ + { + "subtask": "End", + "subtask_index": 0 }, { - "subtask": "Left gripper", - "subtask_index": 8 + "subtask": "Grasp the blue cube with the right gripper", + "subtask_index": 1 }, { - "subtask": "Pick up the marker pen with left gripper", - "subtask_index": 9 + "subtask": "Place the blue cube into the basket with the right gripper", + "subtask_index": 2 }, { - "subtask": "Place the mouse on the mouse pad with right gripper", - "subtask_index": 10 + "subtask": "Grasp the basket with the left gripper", + "subtask_index": 3 }, { "subtask": "null", - "subtask_index": 11 + "subtask_index": 4 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Realman_RMC-AIDA-L" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", @@ -40136,23 +38953,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 30, - "total_frames": 20689, + "total_episodes": 50, + "total_frames": 19083, "fps": 30, - "total_tasks": 12, - "total_videos": 90, + "total_tasks": 5, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, + "state_dim": 28, + "action_dim": 28, "camera_views": 3, - "dataset_size": "664.79 MB" + "dataset_size": "201.40 MB" }, - "frame_num": 20689, - "dataset_size": "664.79 MB", - "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 19083, + "dataset_size": "201.40 MB", + "data_structure": "Realman_RMC-AIDA-L_storage_block_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:29" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -40227,69 +39044,73 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 28 ], "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", + "right_arm_joint_7_rad", "right_gripper_open", "right_eef_pos_x_m", "right_eef_pos_y_m", "right_eef_pos_z_m", "right_eef_rot_euler_x_rad", "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ + "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_arm_joint_7_rad", "left_gripper_open", "left_eef_pos_x_m", "left_eef_pos_y_m", "left_eef_pos_z_m", "left_eef_rot_euler_x_rad", "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", + "left_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 28 + ], + "names": [ "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", + "right_arm_joint_7_rad", "right_gripper_open", "right_eef_pos_x_m", "right_eef_pos_y_m", 
"right_eef_pos_z_m", "right_eef_rot_euler_x_rad", "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -40329,17 +39150,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -40356,10 +39177,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -40376,130 +39197,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + 
"names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -40523,117 +39344,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "AgiBot-g1_mobile_accessory_storage_box_d": { - "path": "AgiBot-g1_mobile_accessory_storage_box_d", - "dataset_name": "mobile_accessory_storage_box_d", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Grasp the mouse and data cable", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - 
"level4": null, - "level5": null - }, - { - "object_name": "accessories", - "level1": "electronic_products", - "level2": "accessories", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-17860", - "dataset_size": "7.6GB", - "statistics": { - "total_episodes": 30, - "total_frames": 17860, - "total_tasks": 1, - "total_videos": 240, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "add3803d-befc-4ae5-8bc7-b6d81060cdcf", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the mouse and data cable", - "Place the mouse and data cable in the paper box", - "End", - "Place the mouse in the paper box", - "Grab and pick up the mouse and power cord from the accessory packaging area.", - "Abnormal", - "Place the mouse and power cord into the box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, 
Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_mobile_accessory_storage_box_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_mobile_accessory_storage_box_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "alpha_bot_2_pass_the_sandbag": { - "path": "alpha_bot_2_pass_the_sandbag", - "dataset_name": "pass_the_sandbag", + "RMC-AIDA-L_plate_storage": { + "path": "RMC-AIDA-L_plate_storage", + "dataset_name": "plate_storage", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -40644,104 +39357,7 @@ "pick", "place" ], - "tasks": "Pass the paper ball to the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": 
"sandbag", - "level1": "toy", - "level2": "sandbag", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-69956", - "dataset_size": "2.0GB", - "statistics": { - "total_episodes": 86, - "total_frames": 69956, - "total_tasks": 1, - "total_videos": 344, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "81e68aa6-8f11-4edf-9c00-cc322e4fc0c9", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pass the paper ball to the right gripper", - "End", - "Place the paper ball on the table with right gripper", - "Grasp the paper ball with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, 
Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "alpha_bot_2_pass_the_sandbag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_pass_the_sandbag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_box_storage_parcel": { - "path": "leju_robot_box_storage_parcel", - "dataset_name": "box_storage_parcel", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the package on the scanner with left gripper", + "tasks": "Place the plate on the shelf with the right gripper", "objects": [ { "object_name": "table", @@ 
-40752,258 +39368,35 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parcel", + "object_name": "plate", "level1": "container", - "level2": "parcel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-537851", - "dataset_size": "27.2GB", - "statistics": { - "total_episodes": 443, - "total_frames": 537851, - "total_tasks": 1, - "total_videos": 1329, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "b6a8ebae-9a8f-44c4-a7a1-7893c558480b", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the package on the scanner with left gripper", - "Abnormal", - "Put the parcel inside the courier box.", - "Place the parcel onto the inbound machine.", - "Place the parcel into the delivery box.", - "Grasp the package on the conveyor belt with right gripper", - "Place the package in the green box with left gripper", - "End", - "Pick up the parcel from the conveyor belt.", - "Take the parcel off the conveyor belt.", - "Place the package on the scanner with right gripper", - "Pick up the parcel from the inbound machine.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── 
(...)" - }, - "leju_robot_moving_parts_s": { - "path": "leju_robot_moving_parts_s", - "dataset_name": "moving_parts_s", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Return to the initial position at the shelf", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-776972", - "dataset_size": "53.8GB", - "statistics": { - "total_episodes": 495, - "total_frames": 776972, - "total_tasks": 1, - "total_videos": 1485, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "4f3b96f0-482a-45ca-9176-d8cdf62101c7", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Return to the initial position at the shelf", - "Grasp the black part with right gripper", - "End", - "Abnormal", - "Pick up the large material from the shelf", - "Place the black part on the table with right gripper", - "Move to the table behind body", - "Move the large material to the workbench", - "Insert the large material into the corresponding slot on the workbench", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - 
"affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_s_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── 
(...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_s_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_clean_the_desktop_a": { - "path": "AIRBOT_MMK2_clean_the_desktop_a", - "dataset_name": "clean_the_desktop_a", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the calculator case in the center of table with right gripper", - "objects": [ - { - "object_name": "table", + "object_name": "shelf", "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "calculator_box", - "level1": "tools", - "level2": "umbrella", + "level2": "shelf", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-9819", - "dataset_size": "338.3MB", + "frame_range": "0-284602", + "dataset_size": "1.8GB", "statistics": { - "total_episodes": 50, - "total_frames": 9819, - "total_tasks": 1, - "total_videos": 200, + "total_episodes": 498, + "total_frames": 284602, + "total_tasks": 2, + "total_videos": 1494, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "05f361bc-b167-4623-b38b-a9371aa7d998", + "dataset_uuid": "8c203fd8-2988-4e8f-8ddc-eedbb69dcdfe", "language": [ "en", "zh" @@ -41012,10 +39405,14 @@ "robotics" ], "sub_tasks": [ - "Place the calculator case in the center of table with right gripper", - "Grasp the calculator case with left gripper", - "Pass the calculator case to right gripper", + "Place the plate on the shelf with the right gripper", "End", + "Pick up the plate with the right gripper", + "Pick up the plate with the left gripper", + "Pass the plate from the left gripper to the right gripper", + "Pass the plate from the right gripper to the left gripper", + "Place the plate on the shelf with the left gripper", + "abnormal", "null" ], "annotations": { @@ -41053,10 +39450,10 @@ ], "citation_bibtex": "@article{robocoin,\n 
title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_clean_the_desktop_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ 
├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_clean_the_desktop_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + 
"data_schema": "RMC-AIDA-L_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_plate_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth": { + "Galaxea_R1_Lite_mix_red_blue_right": { "task_categories": [ "robotics" ], @@ -41086,11 +39483,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth", + "dataset_name": "Galaxea_R1_Lite_mix_red_blue_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -41098,145 +39495,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ambrosial_yogurt", - "level1": "food", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "food", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "food", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - "level1": "food", - "level2": "grape", + "object_name": "red_pigment", + "level1": 
"materials", + "level2": "red_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", "level3": null, "level4": null, "level5": null }, { - "object_name": "eyeglass_case", + "object_name": "test_tubes", "level1": "laboratory_supplies", - "level2": "eyeglass_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cleanser", - "level1": "daily_necessities", - "level2": "cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", + "level2": "test_tubes", "level3": null, "level4": null, "level5": null }, { - "object_name": "khaki_table_cloths", - "level1": "laboratory_supplies", - "level2": "khaki_table_cloths", + "object_name": 
"beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null @@ -41244,205 +39545,98 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." + "pick up the test tube with red pigment and the test tube with blue pigment by grippers and pour them into the container." ], "sub_tasks": [ { - "subtask": "Pass the purple garbage bag to the right gripper", + "subtask": "Pour the red reagent into the graduated cylinder with right gripper", "subtask_index": 0 }, { - "subtask": "Pass the shower sphere to the right gripper", + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", "subtask_index": 1 }, { - "subtask": "Place the milk on the table with the right gripper", + "subtask": "Pick up the test tube containing the red reagent with right gripper", "subtask_index": 2 }, { - "subtask": "Place the XX on the table with the right gripper", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "Pass the yogurt to the right gripper", + "subtask": "Place the test tube into the bowl with right gripper", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", "subtask_index": 5 }, { - "subtask": "Pass the milk to the right gripper", + "subtask": "Pick up the test tube containing the blue reagent with right gripper", "subtask_index": 6 }, { - "subtask": "Place the purple garbage bag on the table with the right gripper", + "subtask": "Pour the blue reagent into the graduated cylinder with right gripper", "subtask_index": 7 }, { - "subtask": "Place the Rubik's Cube on the table with the right gripper", + "subtask": "Grasp the blue reagent with the right gripper", "subtask_index": 8 }, { - "subtask": "Grasp the blue blackboard erasure 
with the left gripper", + "subtask": "Grasp the yellow reagent with the right gripper", "subtask_index": 9 }, { - "subtask": "Grasp the shower sphere with the left gripper", + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", "subtask_index": 10 }, { - "subtask": "Grasp the plush banana with the left gripper", + "subtask": "end", "subtask_index": 11 }, { - "subtask": "Grasp the yogurt with the left gripper", + "subtask": "Place the test tube into the bowl with the right gripper", "subtask_index": 12 }, { - "subtask": "Grasp the milk with the left gripper\n", + "subtask": "Grasp the red reagent with the right gripper", "subtask_index": 13 }, { - "subtask": "Pass the plush banana to the right gripper", + "subtask": "null", "subtask_index": 14 - }, - { - "subtask": "Place the blue blackboard erasure on the table with the right gripper", - "subtask_index": 15 - }, - { - "subtask": "Abnormal", - "subtask_index": 16 - }, - { - "subtask": "Grasp the XX with the left gripper", - "subtask_index": 17 - }, - { - "subtask": "Grasp the blue blackboard erasure with the left grippe", - "subtask_index": 18 - }, - { - "subtask": "Pass the Rubik's Cube to the right gripper", - "subtask_index": 19 - }, - { - "subtask": "Place the milk on the table with the right gripper\n", - "subtask_index": 20 - }, - { - "subtask": "Pass the long bread to the right gripper\n", - "subtask_index": 21 - }, - { - "subtask": "\nPass the milk to the right gripper", - "subtask_index": 22 - }, - { - "subtask": "Pass the long bread to the right gripper", - "subtask_index": 23 - }, - { - "subtask": "Grasp the milk with the left gripper", - "subtask_index": 24 - }, - { - "subtask": "Pass the blue blackboard erasure to the right gripper", - "subtask_index": 25 - }, - { - "subtask": "Place the long bread on the table with the right gripper", - "subtask_index": 26 - }, - { - "subtask": "Grasp the long bread with the left gripper", - "subtask_index": 27 - }, - { - "subtask": 
"Grasp the Rubik's Cube with the left gripper", - "subtask_index": 28 - }, - { - "subtask": "Place the yogurt on the table with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Place the plush banana on the table with the right gripper", - "subtask_index": 30 - }, - { - "subtask": "Pass the milk to the right gripper\n", - "subtask_index": 31 - }, - { - "subtask": "Place the grape on the table with the right gripper", - "subtask_index": 32 - }, - { - "subtask": "Grasp the long bread with the left gripper\n", - "subtask_index": 33 - }, - { - "subtask": "Pass the milk to the right gripper", - "subtask_index": 34 - }, - { - "subtask": "Pass the grape to the right gripper", - "subtask_index": 35 - }, - { - "subtask": "Pass the blue garbage bag to the right gripper", - "subtask_index": 36 - }, - { - "subtask": "Pass the xx to the right gripper", - "subtask_index": 37 - }, - { - "subtask": "Grasp the grape with the left gripper", - "subtask_index": 38 - }, - { - "subtask": "Grasp the purple garbage bag with the left gripper", - "subtask_index": 39 - }, - { - "subtask": "Place the shower sphere on the table with the right gripper", - "subtask_index": 40 - }, - { - "subtask": "null", - "subtask_index": 41 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "handover", - "takeover" + "pick", + "place", + "pour" ], "robot_name": [ - "agilex_cobot_magic" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, 
pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -41453,30 +39647,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 97, - "total_frames": 53653, + "total_episodes": 50, + "total_frames": 32072, "fps": 30, - "total_tasks": 42, - "total_videos": 291, + "total_tasks": 15, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "636.04 MB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "647.52 MB" }, - "frame_num": 53653, - "dataset_size": "636.04 MB", - "data_structure": "Agilex_Cobot_Magic_pass_object_left_to_right_khaki_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- 
episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 32072, + "dataset_size": "647.52 MB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:96" + "train": "0:49" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -41485,8 +39679,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -41498,7 +39715,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -41508,7 +39725,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -41521,7 +39738,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -41531,7 +39748,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -41544,7 +39761,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -41553,32 +39770,20 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - 
"left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -41587,26 +39792,14 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -41646,17 +39839,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -41673,10 +39866,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -41693,130 +39886,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 
2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ 
-41838,9 +40031,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_block_both_hands": { + "Airbot_MMK2_storage_cup_rubik's_cube": { "task_categories": [ "robotics" ], @@ -41870,7 +40063,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_block_both_hands", + "dataset_name": "Airbot_MMK2_storage_cup_rubik's_cube", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -41882,17 +40075,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "cream_storage_basket", - "level1": "home_storage", - "level2": "cream_storage_basket", + "object_name": "cup", + "level1": "kitchen_supplies", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", "level3": null, "level4": null, "level5": null @@ -41900,32 +40101,36 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the building blocks with both hands simultaneously and put them into the white storage box." + "put the rubik's Cube into the plate with right hand and place the cup on top of the cube with left hand." 
], "sub_tasks": [ { - "subtask": "Grasp the red block with the left gripper", + "subtask": "Grasp the paper cup with the left gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the paper cup on the magic cube with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the orange block with the right gripper", + "subtask": "Place the magic cube on the plate with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the red block into the white basket with the left gripper", + "subtask": "Abnormal", "subtask_index": 3 }, { - "subtask": "Place the orange block into the white basket with the right gripper", + "subtask": "Grasp the magic cube with the right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ @@ -41965,23 +40170,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 3769, + "total_episodes": 47, + "total_frames": 13787, "fps": 30, - "total_tasks": 6, - "total_videos": 196, + "total_tasks": 7, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "104.08 MB" + "dataset_size": "387.14 MB" }, - "frame_num": 3769, - "dataset_size": "104.08 MB", - "data_structure": "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- 
episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 13787, + "dataset_size": "387.14 MB", + "data_structure": "Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:48" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -42335,681 +40540,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_store_beauty_blender_and_building_blocks": { - "path": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks", - "dataset_name": "store_beauty_blender_and_building_blocks", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Grasp the green cuboid block with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marble", - "level1": "toy", - "level2": "marble", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "building_blocks", - "level1": "toy", - "level2": "building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-9178", - "dataset_size": "300.5MB", - "statistics": { - "total_episodes": 50, - "total_frames": 9178, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "fee911a7-d998-45da-b549-b1621ca063be", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ 
- "Grasp the green cuboid block with the right gripper", - "Place the bullet into the bowl with the left gripper", - "End", - "Grasp the bullet with the left gripper", - "Abnormal", - "Place the green cuboid block into the bowl with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang 
Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_place_the_fruits_repeatedly": { - "path": "RMC-AIDA-L_place_the_fruits_repeatedly", - "dataset_name": "place_the_fruits_repeatedly", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick", - "pour" - ], - "tasks": "Pick up the bowl with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": 
"orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-541540", - "dataset_size": "3.7GB", - "statistics": { - "total_episodes": 481, - "total_frames": 541540, - "total_tasks": 8, - "total_videos": 1443, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "3448d8ea-8209-43cf-bfcc-550863a26b13", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the bowl with the left gripper", - "Place the bread into the bowl", - "abnormal", - "Pick up the egg yolk pastry with the right gripper", - "Pour the egg yolk pastry out of bowl", - "Pour the orange out of the bowl", - "Place the peach into the bowl", - "Pour the peach out of bowl", - "Place the egg yolk pastry into the bowl", - "Pick up the bread with the right gripper", - "Pick up the peach with the right gripper", - "Pour the bread out of the bowl", - "end", - "Place the bowl on the table", - "Place the orange into the bowl", - "Pick up the orange with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": 
"https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_place_the_fruits_repeatedly_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_place_the_fruits_repeatedly_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers": { - "path": 
"AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers", - "dataset_name": "place_the_piano_and_the_needle-nose_pliers", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hand_tapping_the_qin", - "level1": "toy", - "level2": "hand_tapping_the_qin", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lid", - "level1": "container", - "level2": "lid", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long-nose_pliers", - "level1": "tool", - "level2": "long-nose_pliers", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-25318", - "dataset_size": "826.8MB", - "statistics": { - "total_episodes": 49, - "total_frames": 25318, - "total_tasks": 1, - "total_videos": 196, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "230992b4-bba2-4f5b-99ce-03e63d148ce1", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Static", - "Place the tongs on the white lid with the right gripper", - "Grasp the tongs with the right gripper", - "Place the xylophone on the white lid with the left gripper", - "Grasp the xylophone with the left gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - 
} - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── 
(...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ 
├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_take_the_book": { - "path": "AIRBOT_MMK2_take_the_book", - "dataset_name": "take_the_book", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick" - ], - "tasks": "Lay the book down with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bookshelf", - "level1": "container", - "level2": "bookshelf", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "book", - "level1": "stationery", - "level2": "book", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-9314", - "dataset_size": "352.3MB", - "statistics": { - "total_episodes": 79, - "total_frames": 9314, - "total_tasks": 2, - "total_videos": 316, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "a04264d7-7e81-40cc-af53-0ec87db1f556", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Lay the book down with the right gripper", - "Hold the book with the right gripper", - "Abnormal", - "End", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": 
"auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_take_the_book_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_take_the_book_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_move_the_plate": { - "path": "Cobot_Magic_move_the_plate", - "dataset_name": "move_the_plate", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pick the container that can hold fruits", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-25057", - "dataset_size": "418.0MB", - "statistics": { - "total_episodes": 98, - "total_frames": 25057, - "total_tasks": 1, - "total_videos": 294, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "8d39690e-e415-4f1c-9e39-2c0a83b6fe12", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick the container that can hold fruits", - "Move the picked object to the right side of the table", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - 
"gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_move_the_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ 
├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_move_the_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_robotic_arm_picks_up_battery": { - "path": "AgiBot-g1_robotic_arm_picks_up_battery", - "dataset_name": "robotic_arm_picks_up_battery", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the power supply on the operating table.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "carton", - "level1": "laboratory_supplies", - "level2": "carton", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "battery", - "level1": "tool", - "level2": "battery", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-30215", - "dataset_size": "17.9GB", - "statistics": { - "total_episodes": 56, - "total_frames": 30215, - "total_tasks": 1, - "total_videos": 448, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "14af3598-fff7-4c0b-bca2-16c57884c70e", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the power supply on the operating table.", - "Grab and lift the power supply from the large box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - 
"annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_robotic_arm_picks_up_battery_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_robotic_arm_picks_up_battery_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_storage_braised_pork_belly_shrimp": { + "Airbot_MMK2_storage_spoon": { "task_categories": [ "robotics" ], @@ -43039,7 +40570,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_braised_pork_belly_shrimp", + "dataset_name": "Airbot_MMK2_storage_spoon", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -43051,25 +40582,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "shrimp", - "level1": "prepared_dishes", - "level2": "shrimp", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "braised_pork", - "level1": "prepared_dishes", - "level2": "braised_pork", + "object_name": "spoon", + "level1": "kitchen_supplies", + "level2": "spoon", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "plates", - "level2": "plate", + "object_name": "basin", + "level1": "home_storage", + "level2": "basin", "level3": null, "level4": null, "level5": null @@ -43077,36 +40600,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the braised pork and shrimp on the plate" + "pick up the spoons on the table by hand and place them into the basin." 
], "sub_tasks": [ { - "subtask": "Place the braised pork in brown sauce into the plate with the left gripper", + "subtask": "Grasp the spoon with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the shrimp with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Place the shrimp into the plate with the right gripper", + "subtask": "Grasp the spoon with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the cake from the table and with the left gripper", + "subtask": "Place the spoon into the basin with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the spoon into the basin with the left gripper", "subtask_index": 4 }, - { - "subtask": "Grasp the braised pork in brown sauce with the left gripper", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 5 } ], "atomic_actions": [ @@ -43146,23 +40665,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 38, - "total_frames": 5834, + "total_episodes": 54, + "total_frames": 15550, "fps": 30, - "total_tasks": 7, - "total_videos": 152, + "total_tasks": 6, + "total_videos": 216, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "277.59 MB" + "dataset_size": "509.63 MB" }, - "frame_num": 5834, - "dataset_size": "277.59 MB", - "data_structure": "Airbot_MMK2_storage_braised_pork_belly_shrimp_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- 
episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (26 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 15550, + "dataset_size": "509.63 MB", + "data_structure": "Airbot_MMK2_storage_spoon_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:37" + "train": "0:53" }, "features": { "observation.images.cam_head_rgb": { @@ -43516,7 +41035,110 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_erase_board_right": { + "G1edu-u3_tray_storage_lemon_a": { + "path": "G1edu-u3_tray_storage_lemon_a", + "dataset_name": "tray_storage_lemon_a", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "pick", + "place" + ], + "tasks": "Place the lemon on the blue plate with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tray", + "level1": "container", + "level2": "tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lemon", + "level1": "fruits", + "level2": "lemon", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-2298", + "dataset_size": "27.2MB", + "statistics": { + "total_episodes": 13, + "total_frames": 2298, + "total_tasks": 1, + "total_videos": 13, + "total_chunks": 1, + "chunks_size": 13, + "fps": 30 + }, + "dataset_uuid": "50af80c8-eb74-4dec-9689-8bea0af5a8c2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the lemon on the blue plate with left gripper", + "Place the lemon on the blue plate with right gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": 
"auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n 
url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_tray_storage_lemon_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_tray_storage_lemon_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "agilex_cobot_magic_pass_object_right_to_left_black_tablecloth": { "task_categories": [ "robotics" ], @@ -43546,11 +41168,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_erase_board_right", + "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_black_tablecloth", "dataset_uuid": 
"00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "education", - "level2": "school", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -43566,9 +41188,113 @@ "level5": null }, { - "object_name": "whiteboard", - "level1": "stationery", - "level2": "whiteboard", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + 
"level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", "level3": null, "level4": null, "level5": null @@ -43580,95 +41306,212 @@ "level3": null, "level4": null, "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_table_cloths", + "level1": "laboratory_supplies", + "level2": "black_table_cloths", + "level3": null, + "level4": null, + "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use the right gripper to pick up the board eraser, wipe the notes on the whiteboard clean, and then put them back in place." + "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." 
], "sub_tasks": [ { - "subtask": "Right gripper", + "subtask": "The left gripper places milk on the left side of the table", "subtask_index": 0 }, { - "subtask": "Wipe off the writing on the board with right gripper", + "subtask": "The left gripper places bread on the left side of the table", "subtask_index": 1 }, { - "subtask": "Place the board eraser on the right side of board with right gripper", + "subtask": "Pass the bread to the left gripper", "subtask_index": 2 }, { - "subtask": "Move the board eraser to the right of the whiteboard with right gripper", + "subtask": "Pass the shower sphere to the left gripper", "subtask_index": 3 }, { - "subtask": "Pick up the board eraser with right gripper", + "subtask": "Use the right gripper to grab the bread on the right side of the table", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Use the right gripper to grab the grape on the right side of the table", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 6 - } - ], - "atomic_actions": [ - "grasp", - "lift", - "wipe", - "handover" - ], - "robot_name": [ - "Agilex_Cobot_Magic" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - 
"eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], + }, + { + "subtask": "Use the right gripper to grab the banana on the right side of the table", + "subtask_index": 7 + }, + { + "subtask": "Pass the purple garbage bag to the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Pass the Rubik's Cube to the left gripper", + "subtask_index": 9 + }, + { + "subtask": "The left gripper places yogurt on the left side of the table", + "subtask_index": 10 + }, + { + "subtask": "The left gripper places shower sphere on the left side of the table", + "subtask_index": 11 + }, + { + "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", + "subtask_index": 12 + }, + { + "subtask": "Use the right gripper to grab the grapes on the right side of the table", + "subtask_index": 13 + }, + { + "subtask": "Pass the yogurt to the left gripper", + "subtask_index": 14 + }, + { + "subtask": "The left gripper places grape on the left side of the table", + "subtask_index": 15 + }, + { + "subtask": "The left gripper places grapes on the left side of the table", + "subtask_index": 16 + }, + { + "subtask": "Use the right gripper to grab the milk on the right side of the table", + "subtask_index": 17 + }, + { + "subtask": "Pass the milk to the left gripper", + "subtask_index": 18 + }, + { + "subtask": "The left gripper places banana on the left side of the table", + "subtask_index": 19 + }, + { + "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", + "subtask_index": 20 + }, + { + "subtask": "Pass the banana to the left gripper", + "subtask_index": 21 + }, + { + "subtask": "Pass the grapes to the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", + "subtask_index": 23 + }, + { + "subtask": "Pass the 
grape to the left gripper", + "subtask_index": 24 + }, + { + "subtask": "The left gripper places eyeglass case on the left side of the table", + "subtask_index": 25 + }, + { + "subtask": "Pass the eyeglass case to the left gripper", + "subtask_index": 26 + }, + { + "subtask": "Use the right gripper to grab the yogurt on the right side of the table", + "subtask_index": 27 + }, + { + "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", + "subtask_index": 28 + }, + { + "subtask": "\nPass the milk to the left gripper\n", + "subtask_index": 29 + }, + { + "subtask": "The left gripper places purple garbage bag on the left side of the table", + "subtask_index": 30 + }, + { + "subtask": "null", + "subtask_index": 31 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "handover", + "takeover" + ], + "robot_name": [ + "agilex_cobot_magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], "statistics": { - "total_episodes": 50, - "total_frames": 28998, + "total_episodes": 93, 
+ "total_frames": 59541, "fps": 30, - "total_tasks": 7, - "total_videos": 150, + "total_tasks": 32, + "total_videos": 279, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "249.99 MB" + "dataset_size": "1.15 GB" }, - "frame_num": 28998, - "dataset_size": "249.99 MB", - "data_structure": "Agilex_Cobot_Magic_erase_board_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 59541, + "dataset_size": "1.15 GB", + "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:92" }, "features": { "observation.images.cam_head_rgb": { @@ -44039,12 +41882,12 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "alpha_bot_2_stack_building_blocks": { - "path": "alpha_bot_2_stack_building_blocks", - "dataset_name": "stack_building_blocks", + "AIRBOT_MMK2_clean_the_desktop": { + "path": "AIRBOT_MMK2_clean_the_desktop", + "dataset_name": "clean_the_desktop", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ @@ -44052,7 +41895,7 @@ "pick", "place" ], - "tasks": "Grasp the blue building block with left gripper", + "tasks": "End", "objects": [ { "object_name": "table", @@ -44063,27 +41906,27 @@ "level5": null }, { - "object_name": "building_blocks", - "level1": "toys", - "level2": "building_blocks", + "object_name": "calculator_box", + "level1": "tools", + "level2": "umbrella", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-130049", - "dataset_size": "3.4GB", + "operation_platform_height": 77.2, + "frame_range": "0-8709", + "dataset_size": "327.5MB", "statistics": { - "total_episodes": 139, - "total_frames": 130049, + "total_episodes": 49, + "total_frames": 8709, "total_tasks": 1, - "total_videos": 556, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "f7781909-81c7-40ee-98a5-67783a48dc66", + "dataset_uuid": "05f361bc-b167-4623-b38b-a9371aa7d999", "language": [ "en", "zh" @@ -44092,22 +41935,11 @@ 
"robotics" ], "sub_tasks": [ - "Grasp the blue building block with left gripper", "End", - "Place the pink building block in the center of table with left gripper", - "Grasp the pink building block with left gripper", - "Place the blue building block on the pink building blocks with left gripper", - "Grasp the green building block with left gripper", - "Place the pink building block on the blue building blocks with right gripper", - "Place the green block on the pink block with left gripper", - "Place the blue building block on the blue building blocks with right gripper", - "Place the blue building block on the pink building blocks with right gripper", - "Grasp the blue building block with right gripper", - "Place the pink building block on the blue building blocks with left gripper", - "Place the blue building block on the blue building blocks with left gripper", - "Grasp the pink building block with right gripper", - "Place the pink block on the blue block with right gripper", - "Place the blue building block in the center of table with left gripper", + "Grasp the tissue with the right gripper", + "Place the umbrella on the white lid with the left gripper", + "Place the tissue on the white lid with the right gripper", + "Grasp the umbrella with the left gripper", "null" ], "annotations": { @@ -44145,944 +41977,622 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "alpha_bot_2_stack_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_stack_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_clean_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_clean_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_catch_the_ball": { - "path": "Cobot_Magic_catch_the_ball", - "dataset_name": "catch_the_ball", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Put the picked-up object on the table.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ball", - "level1": "toy", - "level2": "ball", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-57085", - "dataset_size": "877.6MB", - "statistics": { - "total_episodes": 98, - "total_frames": 57085, - "total_tasks": 1, - "total_videos": 294, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "3ea12ae6-b4f3-453d-b56f-bca0e5a346c8", - "language": [ - "en", - "zh" - ], + "Airbot_MMK2_take_bottle_umbrella": { "task_categories": [ "robotics" ], - "sub_tasks": [ - "Put the picked-up object on the table.", - "Grab the rolling spherical object.", - "Grab the spherical object with your left arm.", - "Place the sphere on the white object.", - "null" + "language": [ + "en" ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": 
"https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_catch_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── 
meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_catch_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_open_the_shoebox": { - "path": "Cobot_Magic_open_the_shoebox", - "dataset_name": "open_the_shoebox", - "robot_type": "", - 
"end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pull" - ], - "tasks": "Open the shoe box", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, + "license": "apache-2.0", + "configs": [ { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", - "level3": null, - "level4": null, - "level5": null + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" } ], - "operation_platform_height": 77.2, - "frame_range": "0-185740", - "dataset_size": "2.8GB", - "statistics": { - "total_episodes": 299, - "total_frames": 185740, - "total_tasks": 3, - "total_videos": 897, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "1708df8d-97e4-42b0-a8ee-de1bb93b4d77", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Open the shoe box", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_take_bottle_umbrella", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu 
Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_open_the_shoebox_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_open_the_shoebox_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_pour_rice": { - "path": "RMC-AIDA-L_pour_rice", - "dataset_name": "pour_rice", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "pour" - ], - "tasks": "Pick up the cup with rice in it with the left gripper", + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "empty_bottle", + "level1": "mineral_water", + "level2": "empty_bottle", "level3": null, "level4": null, "level5": null }, { - "object_name": "rice", - "level1": "food", - "level2": "rice", + "object_name": "umbrella", + "level1": "daily_necessities", + "level2": "umbrella", "level3": null, "level4": null, "level5": null }, { - "object_name": "grain_cup", - "level1": "container", - "level2": "grain_cup", + "object_name": "lid", + "level1": "laboratory_supplies", + "level2": "lid", "level3": null, "level4": null, "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the 
operation type information.", + "task_instruction": [ + "take the umbrella off the lid with right hand, and then remove the bottle from the lid with left hand." + ], + "sub_tasks": [ + { + "subtask": " Grasp the umbrella placed on the white lid with the left gripper", + "subtask_index": 0 }, { - "object_name": "rice_cooker", - "level1": "home_appliances", - "level2": "rice_cooker", - "level3": null, - "level4": null, - "level5": null + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Grasp the Yibao placed on the white lid with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the umbrella on the table with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the Yibao on the table with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], - "operation_platform_height": 77.2, - "frame_range": "0-413739", - "dataset_size": "2.9GB", + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + 
"eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], "statistics": { - "total_episodes": 714, - "total_frames": 413739, - "total_tasks": 2, - "total_videos": 2142, + "total_episodes": 54, + "total_frames": 18663, + "fps": 30, + "total_tasks": 6, + "total_videos": 216, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "759.87 MB" }, - "dataset_uuid": "c79b49bf-cadf-4094-831d-bbf6403956c6", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the cup with rice in it with the left gripper", - "End", - "Move the bowl in the center of view with left gripper", - "Pick up the bowl with the left gripper", - "Place the cup on the table with the right gripper", - "Pour the rice from the cup into the bowl with the left gripper", - "Place the bowl in the center of view with the right gripper", - "Static", - "Place the cup on the table with the left gripper", - "Pour the rice from the cup into the bowl with the right gripper", - "Pick up the cup with rice in it with the right gripper", - "Pick up the bowl with the right gripper", - "Place the bowl in the center of view with the left gripper", - "Grasp the cup with rice in it with right gripper", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "frame_num": 18663, + "dataset_size": "759.87 MB", + "data_structure": "Airbot_MMK2_take_bottle_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:53" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + 
"video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + 
"left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": 
[ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei 
Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_bottle_part": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], "tags": [ "RoboCOIN", "LeRobot" ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, 
Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_bowl_storage_pepper": { - "path": "AIRBOT_MMK2_bowl_storage_pepper", - "dataset_name": "bowl_storage_pepper", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the white bowl on the pink bowl with the left gripper", - "objects": [ + "license": "apache-2.0", + "configs": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_bottle_part", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "object_name": "chili_pepper", - "level1": "vegetables", - "level2": "chili_pepper", + "object_name": "water_bottle", + "level1": "beverages", + "level2": "water_bottle", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", + "object_name": "handle", + "level1": "laboratory_supplies", + "level2": "handle", "level3": null, "level4": null, "level5": null }, { - "object_name": "pumpkin", - "level1": "vegetables", - "level2": "pumpkin", + "object_name": "carton", + "level1": "laboratory_supplies", + "level2": "carton", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-19594", - "dataset_size": "715.7MB", - "statistics": { - "total_episodes": 150, - "total_frames": 19594, - "total_tasks": 3, - "total_videos": 600, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "83f67fd9-c480-4786-ae1d-41475a4f4618", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the water bottle and handle 
into the cardboard box." ], "sub_tasks": [ - "Place the white bowl on the pink bowl with the left gripper", - "Grasp the white bowl with the left gripper", - "Static", - "End", - "Abnormal", - "Grasp the pumpkin with the left gripper", - "Place the pumpkin on the pink bowl with the left gripper", - "Grasp the yellow round chili pepper with the right gripper", - "Place yellow round chili pepper on the blue bowl with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, 
Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_bowl_storage_pepper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"AIRBOT_MMK2_bowl_storage_pepper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_move_the_position_of_the_cookie": { - "path": "R1_Lite_move_the_position_of_the_cookie", - "dataset_name": "move_the_position_of_the_cookie", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Place the cookie on the table with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, 
- { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - 
"level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": 
"coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the water bottle with the right gripper", + "subtask_index": 0 }, { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the water bottle into the cardboard box with the right gripper", + "subtask_index": 1 }, { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null + "subtask": "Lift the water bottle with the right gripper", + "subtask_index": 2 }, { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null + "subtask": "Lift the remote control clip with the left gripper ", + "subtask_index": 3 }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": 
null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-5045", - "dataset_size": "198.3MB", - "statistics": { - "total_episodes": 25, - "total_frames": 5045, - "total_tasks": 1, - "total_videos": 100, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "7015670f-e702-4c08-a662-e74aee0c0872", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the cookie on the table with right gripper", - "Static", - "Place the cookie on the table with left gripper", - "End", - "Grasp the cookie with right gripper", - "Grasp the cookie with left gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, 
Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_cookie_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_cookie_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_move_tub": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "subtask": "Abnormal", + "subtask_index": 4 }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_tub", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "kitchen", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "pass the plate to your right hand with your left hand and then put it down." 
- ], - "sub_tasks": [ { - "subtask": "Grasp the white box with left gripper", - "subtask_index": 0 + "subtask": "Static", + "subtask_index": 5 }, { - "subtask": "Place the white box in the center of table with right gripper", - "subtask_index": 1 + "subtask": "Place the remote control clip into the cardboard box with the left gripper", + "subtask_index": 6 }, { "subtask": "End", - "subtask_index": 2 + "subtask_index": 7 }, { - "subtask": "Pass the white box to right gripper", - "subtask_index": 3 + "subtask": "Grasp the remote control clip with the left gripper ", + "subtask_index": 8 }, { "subtask": "null", - "subtask_index": 4 + "subtask_index": 9 } ], "atomic_actions": [ @@ -45122,23 +42632,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 37, - "total_frames": 11634, + "total_episodes": 49, + "total_frames": 10499, "fps": 30, - "total_tasks": 5, - "total_videos": 148, + "total_tasks": 10, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "340.31 MB" + "dataset_size": "508.16 MB" }, - "frame_num": 11634, - "dataset_size": "340.31 MB", - "data_structure": "Airbot_MMK2_move_tub_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(25 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 10499, + "dataset_size": "508.16 MB", + "data_structure": "Airbot_MMK2_storage_bottle_part_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:36" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -45492,19 +43002,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_put_the_lemon_af": { - "path": "G1edu-u3_put_the_lemon_af", - "dataset_name": "put_the_lemon_af", + "AIRBOT_MMK2_mobile_calculator_box": { + "path": "AIRBOT_MMK2_mobile_calculator_box", + "dataset_name": "mobile_calculator_box", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "place" + "place", + "pick" ], - "tasks": "End", + "tasks": "Grasp the calculator case with left gripper", "objects": [ { "object_name": "table", @@ -45515,27 +43026,35 @@ "level5": null }, { - "object_name": "lemon", - "level1": "fruits", - "level2": "lemon", + "object_name": "calculator_box", + "level1": "container", + "level2": "calculator_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "container", + "level2": "lid", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-3544", - "dataset_size": "37.1MB", + "operation_platform_height": 77.2, + "frame_range": "0-34396", + "dataset_size": "1.1GB", "statistics": { - "total_episodes": 19, - "total_frames": 3544, - "total_tasks": 1, - "total_videos": 19, + "total_episodes": 150, + "total_frames": 34396, + "total_tasks": 2, + "total_videos": 600, "total_chunks": 1, - "chunks_size": 19, + "chunks_size": 1000, "fps": 30 }, - 
"dataset_uuid": "c37586ed-bc01-40b2-9038-cd26ce06df5b", + "dataset_uuid": "7629d634-2d89-41ad-a722-364a319f374b", "language": [ "en", "zh" @@ -45544,9 +43063,16 @@ "robotics" ], "sub_tasks": [ + "Grasp the calculator case with left gripper", "End", - "Place the lemon on the table with right gripper", - "Place the lemon on the table with left gripper", + "Place the calculator case in the center of table with right gripper", + "Place the white box in the center of table with right gripper", + "Pass the white box to right gripper", + "Grasp the yellow book with left gripper", + "Place the yellow book in the center of table with right gripper", + "Pass the yellow book to right gripper", + "Pass the calculator case to right gripper", + "Grasp the white box with left gripper", "null" ], "annotations": { @@ -45584,10 +43110,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan 
Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_put_the_lemon_af_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_put_the_lemon_af_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_mobile_calculator_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ 
└── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_mobile_calculator_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_drink": { + "Airbot_MMK2_turn_page": { "task_categories": [ "robotics" ], @@ -45617,11 +43143,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_drink", + "dataset_name": "Airbot_MMK2_turn_page", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "convenience", - "level2": "supermarket", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -45629,17 +43155,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "any_beverages", - "level1": "beverages", - "level2": "any_beverages", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_cup", - "level1": "cups", - "level2": "paper_cup", + "object_name": "book", + "level1": "stationery", + "level2": "books", "level3": null, "level4": null, "level5": null @@ -45647,54 +43165,28 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the drink by hand and put it on the table." + "turn a page of the english book on the table with right hand." 
], "sub_tasks": [ { - "subtask": "Place the coffee on the table with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Place the vitamin B water on the table with the left gripper", + "subtask": "Turn the book to the next page with the right gripper", "subtask_index": 1 }, { "subtask": "Abnormal", "subtask_index": 2 }, - { - "subtask": "Place the vitamin B water on the table with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Place the coffee on the table with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Grasp the vitamin B water on the white lid with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the vitamin B water on the white lid with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "End", - "subtask_index": 7 - }, - { - "subtask": "Grasp the coffee on the white lid with the right gripper", - "subtask_index": 8 - }, { "subtask": "null", - "subtask_index": 9 + "subtask_index": 3 } ], "atomic_actions": [ - "grasp", - "pick", - "place" + "flip" ], "robot_name": [ "Airbot_MMK2" @@ -45728,23 +43220,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 86, - "total_frames": 17345, + "total_episodes": 149, + "total_frames": 19581, "fps": 30, - "total_tasks": 10, - "total_videos": 344, + "total_tasks": 4, + "total_videos": 596, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "570.83 MB" + "dataset_size": "740.88 MB" }, - "frame_num": 17345, - "dataset_size": "570.83 MB", - "data_structure": "Airbot_MMK2_take_drink_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- 
episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (74 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 19581, + "dataset_size": "740.88 MB", + "data_structure": "Airbot_MMK2_turn_page_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(137 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:85" + "train": "0:148" }, "features": { "observation.images.cam_head_rgb": { @@ -46098,1310 +43590,148 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_place_square_pyramid": { - "path": "Cobot_Magic_place_square_pyramid", - "dataset_name": "place_square_pyramid", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" + "language": [ + "en" ], - "tasks": "Place the block onto the cube-shaped block", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office & workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "cube_block", - "level1": "toy", - "level2": "cube_block", + "object_name": "deli_water-based_marker", + "level1": "stationery", + "level2": "deli_water-based_marker", "level3": null, "level4": null, "level5": null }, { - "object_name": "square_pyramid", - "level1": "toy", - "level2": "square_pyramid", + "object_name": "notebook", + "level1": "stationery", + "level2": "notebook", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-25851", - "dataset_size": "521.2MB", - "statistics": { - "total_episodes": 99, - "total_frames": 25851, - "total_tasks": 1, - "total_videos": 297, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "53a89b6b-14be-4a0e-8495-f3310b2ba5bd", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the block onto the cube-shaped block", - "Place the triangular block onto the cube block", - "End", - "Grasp the triangular block", - "null" - ], - "annotations": { - 
"subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint 
arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_place_square_pyramid_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_place_square_pyramid_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "alpha_bot_2_carry_the_clothes_basket": { - "path": "alpha_bot_2_carry_the_clothes_basket", - "dataset_name": "carry_the_clothes_basket", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Move the basket above the other table", - "objects": [ + }, { - "object_name": "clothes_basket", - "level1": "container", - "level2": "clothes_basket", + "object_name": "mouse", + "level1": "appliances", + "level2": "mouse", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-46527", - "dataset_size": "2.5GB", - "statistics": { - "total_episodes": 50, - "total_frames": 46527, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "0df48917-c10a-469f-8f74-d57f284c09cd", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Move the basket above the other table", - "Grasp the basket with both grippers", - "Grasp the basket with left gripper", - "Grasp the basket with right gripper", - "End", - "Place the basket on the table with right gripper", - "Place the basket on the table with both grippers", - "Place the basket on the table with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - 
"eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": 
"alpha_bot_2_carry_the_clothes_basket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_carry_the_clothes_basket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ 
├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_place_the_basin": { - "path": "AIRBOT_MMK2_place_the_basin", - "dataset_name": "place_the_basin", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Static", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "mouse_pad", + "level1": "appliances", + "level2": "mouse_pad", "level3": null, "level4": null, "level5": null }, { - "object_name": "basin", - "level1": "container", - "level2": "basin", + "object_name": "red_table_cloths", + "level1": "laboratory_supplies", + "level2": "red_table_cloths", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-7672", - "dataset_size": "272.9MB", - "statistics": { - "total_episodes": 50, - "total_frames": 7672, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "14a514fa-2a43-4b79-bcf6-c7d01e427623", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Static", - "Place the basin on the table with the right gripper", - "Grasp the basin with the right gripper", - "End", - "Lift 
the basin with the left gripper ", - "Grasp the basin with the left gripper ", - "Lift the basin with the right gripper", - "Place the basin on the table with the left gripper ", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, 
Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_basin_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_basin_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_basket_storage_apple_b": { - "path": "G1edu-u3_basket_storage_apple_b", - "dataset_name": "basket_storage_apple_b", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick", - "place" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-118352", - "dataset_size": "2.3GB", - "statistics": { - "total_episodes": 241, - "total_frames": 118352, - "total_tasks": 1, - "total_videos": 723, - "total_chunks": 1, - 
"chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "8c0ab1ee-eb0f-48a0-a7be-ff698d2f4651", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Place the grapes in the plate with left hand", - "Grasp the grapes with left hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing 
Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_basket_storage_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_basket_storage_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── 
episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Galaxea_R1_Lite_classify_object_six": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_classify_object_six", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "brown_basket", - "level1": "baskets", - "level2": "brown_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_basket", - "level1": "baskets", - "level2": "yellow_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_fruits", - "level1": "fruits", - "level2": "any_fruits", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_vegetables", - "level1": "vegetables", - "level2": "any_vegetables", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_snacks", - "level1": "snacks", - "level2": "any_snacks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_bread", - "level1": "bread", - "level2": "any_bread", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
+ "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." ], "sub_tasks": [ { - "subtask": "Grasp the round bread and put it in the right basket", + "subtask": "Right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the small basket containing bread and put it in the right basket", + "subtask": "Pick up the mouse with right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the lemon and put it in the right basket", + "subtask": "Pick up the marker pen with right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask": "Pick up the mouse with left gripper", "subtask_index": 3 }, { - "subtask": "Grasp the can and put it in the right basket", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Grasp the apple and put it in the right basket", + "subtask": "Place the marker pen on the notebook with right gripper", "subtask_index": 5 }, { - "subtask": "Grasp the long bread and put it in the right basket", + "subtask": "Place the mouse on the mouse pad with left gripper", "subtask_index": 6 }, { - "subtask": "Grasp the canned cola and put it in the right basket", + "subtask": "Place the marker pen on the notebook with left gripper", "subtask_index": 7 }, { - "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask": "Left gripper", "subtask_index": 8 }, { - "subtask": "Grasp the glasses case and put it in the left basket", + "subtask": "Pick up the marker pen with left gripper", "subtask_index": 9 }, { - "subtask": "Grasp the peeler and put it in the left basket", + "subtask": "Place the mouse on the mouse pad with right gripper", "subtask_index": 10 }, - { - "subtask": "Grasp the cookie and put it in the right basket", - "subtask_index": 11 - }, - { - "subtask": "Grasp the 
yellow duck and put it in the right basket", - "subtask_index": 12 - }, - { - "subtask": "Grasp the cleaning agent and put it in the left basket", - "subtask_index": 13 - }, - { - "subtask": "Grasp the soft cleanser and put it in the left basket", - "subtask_index": 14 - }, - { - "subtask": "Grasp the waffle and put it in the right basket", - "subtask_index": 15 - }, - { - "subtask": "Grasp the grey towel and put it in the left basket", - "subtask_index": 16 - }, - { - "subtask": "Grasp the orange and put it in the right basket", - "subtask_index": 17 - }, - { - "subtask": "Grasp the brown towel and put it in the left basket", - "subtask_index": 18 - }, - { - "subtask": "Grasp the pen container and put it in the left basket", - "subtask_index": 19 - }, - { - "subtask": "Grasp the hard cleanser and put it in the left basket", - "subtask_index": 20 - }, - { - "subtask": "Grasp the canned cola and put it in the left basket", - "subtask_index": 21 - }, - { - "subtask": "Grasp the triangle cake and put it in the right basket", - "subtask_index": 22 - }, - { - "subtask": "Grasp the shower sphere and put it in the left basket", - "subtask_index": 23 - }, - { - "subtask": "Grasp the rubiks cube and put it in the right basket", - "subtask_index": 24 - }, - { - "subtask": "Grasp the broom and put it in the left basket", - "subtask_index": 25 - }, - { - "subtask": "Grasp the white eraser and put it in the left basket", - "subtask_index": 26 - }, - { - "subtask": "Grasp the milk and put it in the right basket", - "subtask_index": 27 - }, - { - "subtask": "Grasp the lime and put it in the right basket", - "subtask_index": 28 - }, - { - "subtask": "Grasp the brown towel and put it in the right basket", - "subtask_index": 29 - }, - { - "subtask": "Grasp the duck toys and put it in the left basket", - "subtask_index": 30 - }, - { - "subtask": "Grasp the power strip and put it in the left basket", - "subtask_index": 31 - }, - { - "subtask": "Grasp the red duck and put it in the 
left basket", - "subtask_index": 32 - }, - { - "subtask": "Grasp the shampoo and put it in the left basket", - "subtask_index": 33 - }, - { - "subtask": "Grasp the yellow duck and put it in the left basket", - "subtask_index": 34 - }, - { - "subtask": "Grasp the peach and put it in the right basket", - "subtask_index": 35 - }, - { - "subtask": "Abnormal", - "subtask_index": 36 - }, - { - "subtask": "Grasp the tea cup and put it in the left basket", - "subtask_index": 37 - }, - { - "subtask": "Grasp the pink marker and put it in the left basket", - "subtask_index": 38 - }, - { - "subtask": "Grasp the brush and put it in the left basket", - "subtask_index": 39 - }, - { - "subtask": "Grasp the washing liquid and put it in the left basket", - "subtask_index": 40 - }, - { - "subtask": "Place the rubiks cube in the center of the table", - "subtask_index": 41 - }, - { - "subtask": "Grasp the soap and put it in the left basket", - "subtask_index": 42 - }, - { - "subtask": "Grasp the grey towel and put it in the right basket", - "subtask_index": 43 - }, - { - "subtask": "Grasp the blue marker and put it in the left basket", - "subtask_index": 44 - }, - { - "subtask": "Grasp the black glass cup and put it in the left basket", - "subtask_index": 45 - }, - { - "subtask": "Grasp the bath ball and put it in the left basket", - "subtask_index": 46 - }, - { - "subtask": "Grasp the coke and put it in the right basket", - "subtask_index": 47 - }, - { - "subtask": "Grasp the potato chips and put it in the right basket", - "subtask_index": 48 - }, - { - "subtask": "Grasp the ballpoint pen and put it in the left basket", - "subtask_index": 49 - }, - { - "subtask": "Grasp the rubiks cube and put it in the left basket", - "subtask_index": 50 - }, - { - "subtask": "Grasp the square chewing gum and put it in the right basket", - "subtask_index": 51 - }, - { - "subtask": "Grasp the glasses case and put it in the right basket", - "subtask_index": 52 - }, - { - "subtask": "Grasp the banana 
and put it in the right basket", - "subtask_index": 53 - }, - { - "subtask": "Grasp the ad milk and put it in the right basket", - "subtask_index": 54 - }, - { - "subtask": "Grasp the soda water and put it in the right basket", - "subtask_index": 55 - }, - { - "subtask": "Grasp the peach doll and put it in the right basket", - "subtask_index": 56 - }, - { - "subtask": "Grasp the spoon and put it in the left basket", - "subtask_index": 57 - }, - { - "subtask": "Grasp the blue marker and put it in the right basket", - "subtask_index": 58 - }, - { - "subtask": "Grasp the blue cup and put it in the left basket", - "subtask_index": 59 - }, - { - "subtask": "Grasp the compass and put it in the left basket", - "subtask_index": 60 - }, - { - "subtask": "Grasp the chocolate and put it in the right basket", - "subtask_index": 61 - }, - { - "subtask": "Grasp the compass and put it in the right basket", - "subtask_index": 62 - }, - { - "subtask": "End", - "subtask_index": 63 - }, - { - "subtask": "Grasp the tape and put it in the left basket", - "subtask_index": 64 - }, - { - "subtask": "Grasp the peeler and put it in the right basket", - "subtask_index": 65 - }, - { - "subtask": "Grasp the yogurt and put it in the right basket", - "subtask_index": 66 - }, - { - "subtask": "Grasp the green lemon and put it in the right basket", - "subtask_index": 67 - }, - { - "subtask": "Grasp the black marker and put it in the left basket", - "subtask_index": 68 - }, - { - "subtask": "Grasp the round chewing gum and put it in the right basket", - "subtask_index": 69 - }, - { - "subtask": "Grasp the bread slice and put it in the right basket", - "subtask_index": 70 - }, - { - "subtask": "Grasp the square chewing gum and put it in the left basket", - "subtask_index": 71 - }, - { - "subtask": "null", - "subtask_index": 72 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Galaxea_R1_Lite" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due 
to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 105, - "total_frames": 86401, - "fps": 30, - "total_tasks": 73, - "total_videos": 420, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "5.25 GB" - }, - "frame_num": 86401, - "dataset_size": "5.25 GB", - "data_structure": "Galaxea_R1_Lite_classify_object_six_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- 
episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (93 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:104" - }, - "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": 
false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", 
- "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": 
"RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "Agilex_Cobot_Magic_storage_orange_basket_left": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_orange_basket_left", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_basket", - "level1": "home_storage", - "level2": "brown_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "food", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - } - ], - 
"task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "left gripper grabs oranges and puts them in the basket." - ], - "sub_tasks": [ - { - "subtask": "End", - "subtask_index": 0 - }, - { - "subtask": "Place the orange in the basket with left gripper", - "subtask_index": 1 - }, - { - "subtask": "Abnormal", - "subtask_index": 2 - }, - { - "subtask": "Grasp the orange with left gripper", - "subtask_index": 3 - }, { "subtask": "null", - "subtask_index": 4 + "subtask_index": 11 } ], "atomic_actions": [ @@ -47439,23 +43769,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 99, - "total_frames": 40181, + "total_episodes": 30, + "total_frames": 20689, "fps": 30, - "total_tasks": 5, - "total_videos": 297, + "total_tasks": 12, + "total_videos": 90, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "488.80 MB" + "dataset_size": "664.79 MB" }, - "frame_num": 40181, - "dataset_size": "488.80 MB", - "data_structure": "Agilex_Cobot_Magic_storage_orange_basket_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 20689, + "dataset_size": "664.79 MB", + "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:98" + "train": "0:29" }, "features": { "observation.images.cam_head_rgb": { @@ -47826,20 +44156,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "leju_robot_moving_parts_i": { - "path": "leju_robot_moving_parts_i", - "dataset_name": "moving_parts_i", + "AgiBot-g1_mobile_accessory_storage_box_d": { + "path": "AgiBot-g1_mobile_accessory_storage_box_d", + "dataset_name": "mobile_accessory_storage_box_d", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "Move to the initial position of the workbench.", + "tasks": "Grasp the mouse and data cable", "objects": [ { "object_name": "table", @@ -47850,15 +44180,341 @@ "level5": null }, { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "cabinet", + "object_name": "accessories", + "level1": "electronic_products", + "level2": "accessories", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-17860", + "dataset_size": "7.6GB", + "statistics": { + "total_episodes": 30, + "total_frames": 17860, + "total_tasks": 1, + "total_videos": 240, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "add3803d-befc-4ae5-8bc7-b6d81060cdcf", + "language": [ + "en", + "zh" + ], + 
"task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the mouse and data cable", + "Place the mouse and data cable in the paper box", + "End", + "Place the mouse in the paper box", + "Grab and pick up the mouse and power cord from the accessory packaging area.", + "Abnormal", + "Place the mouse and power cord into the box.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng 
Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_mobile_accessory_storage_box_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ 
├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_mobile_accessory_storage_box_d_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ 
├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "alpha_bot_2_pass_the_sandbag": { + "path": "alpha_bot_2_pass_the_sandbag", + "dataset_name": "pass_the_sandbag", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Pass the paper ball to the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sandbag", + "level1": "toy", + "level2": "sandbag", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-69956", + "dataset_size": "2.0GB", + "statistics": { + "total_episodes": 86, + "total_frames": 69956, + "total_tasks": 1, + "total_videos": 344, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "81e68aa6-8f11-4edf-9c00-cc322e4fc0c9", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pass the paper ball to the right gripper", + "End", + "Place the paper ball on the table with right gripper", 
+ "Grasp the paper ball with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang 
Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_pass_the_sandbag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_pass_the_sandbag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_box_storage_parcel": { + "path": "leju_robot_box_storage_parcel", + "dataset_name": "box_storage_parcel", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the package on the scanner with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parcel", + "level1": "container", + "level2": "parcel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-537851", + "dataset_size": "27.2GB", + "statistics": { + "total_episodes": 443, + "total_frames": 
537851, + "total_tasks": 1, + "total_videos": 1329, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "b6a8ebae-9a8f-44c4-a7a1-7893c558480b", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the package on the scanner with left gripper", + "Abnormal", + "Put the parcel inside the courier box.", + "Place the parcel onto the inbound machine.", + "Place the parcel into the delivery box.", + "Grasp the package on the conveyor belt with right gripper", + "Place the package in the green box with left gripper", + "End", + "Pick up the parcel from the conveyor belt.", + "Take the parcel off the conveyor belt.", + "Place the package on the scanner with right gripper", + "Pick up the parcel from the inbound machine.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, 
Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_box_storage_parcel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_moving_parts_s": { + "path": "leju_robot_moving_parts_s", + "dataset_name": "moving_parts_s", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Return to the initial position at the shelf", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", "level1": "home_storage", "level2": "cabinet", 
"level3": null, @@ -47867,18 +44523,18 @@ } ], "operation_platform_height": null, - "frame_range": "0-229727", - "dataset_size": "14.0GB", + "frame_range": "0-776972", + "dataset_size": "53.8GB", "statistics": { - "total_episodes": 155, - "total_frames": 229727, + "total_episodes": 495, + "total_frames": 776972, "total_tasks": 1, - "total_videos": 465, + "total_videos": 1485, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "c8b01b9f-9806-4a5a-a895-01df2cac436f", + "dataset_uuid": "4f3b96f0-482a-45ca-9176-d8cdf62101c7", "language": [ "en", "zh" @@ -47887,16 +44543,112 @@ "robotics" ], "sub_tasks": [ - "Move to the initial position of the workbench.", - "Grasp the gray part with right gripper", - "Insert the small component into the corresponding slot on the workbench.", + "Return to the initial position at the shelf", + "Grasp the black part with right gripper", + "End", + "Abnormal", + "Pick up the large material from the shelf", + "Place the black part on the table with right gripper", "Move to the table behind body", - "Grasp the white part with right gripper", - "Place the gray part on the table with right gripper", - "Place the white part on the table with right gripper", + "Move the large material to the workbench", + "Insert the large material into the corresponding slot on the workbench", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + 
"paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_s_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_s_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_clean_the_desktop_a": { + "path": "AIRBOT_MMK2_clean_the_desktop_a", + "dataset_name": "clean_the_desktop_a", + "robot_type": "", + 
"end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the calculator case in the center of table with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "calculator_box", + "level1": "tools", + "level2": "umbrella", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-9819", + "dataset_size": "338.3MB", + "statistics": { + "total_episodes": 50, + "total_frames": 9819, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "05f361bc-b167-4623-b38b-a9371aa7d998", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the calculator case in the center of table with right gripper", + "Grasp the calculator case with left gripper", + "Pass the calculator case to right gripper", "End", - "Pick up the small component from the shelf.", - "Bring the small component to the front of the workbench.", "null" ], "annotations": { @@ -47934,10 +44686,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, 
Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_clean_the_desktop_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ 
├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_clean_the_desktop_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_pour_water_black_tablecloth": { + "agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth": { "task_categories": [ "robotics" ], @@ -47967,11 +44719,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_pour_water_black_tablecloth", + "dataset_name": 
"agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -47979,25 +44731,145 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "black_table_cloth", - "level1": "table_cloths", - "level2": "black_table_cloth", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_cup", - "level1": "cups", - "level2": "any_cup", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null }, { - "object_name": "water", - "level1": "beverages", - "level2": "water", + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "khaki_table_cloths", + "level1": "laboratory_supplies", + "level2": "khaki_table_cloths", "level3": null, "level4": null, "level5": null @@ -48005,214 +44877,205 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use the gripper to pour the water into the other two cups on black table." + "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." 
], "sub_tasks": [ { - "subtask": "Pick up the gray plastic cup with left gripper", + "subtask": "Pass the purple garbage bag to the right gripper", "subtask_index": 0 }, { - "subtask": "Pour water from blue cup to white cup with the left gripper", + "subtask": "Pass the shower sphere to the right gripper", "subtask_index": 1 }, { - "subtask": "Pour water from white cup to blue cup with the right gripper", + "subtask": "Place the milk on the table with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the green cup with the left gripper", + "subtask": "Place the XX on the table with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the blue plastic cup on the table with left gripper", + "subtask": "Pass the yogurt to the right gripper", "subtask_index": 4 }, { - "subtask": "Pour water from blue cup to green cup with the right gripper", + "subtask": "End", "subtask_index": 5 }, { - "subtask": "Pour water from white cup to green cup with the right gripper", + "subtask": "Pass the milk to the right gripper", "subtask_index": 6 }, { - "subtask": "Left gripper", + "subtask": "Place the purple garbage bag on the table with the right gripper", "subtask_index": 7 }, { - "subtask": "Pick up the green plastic cup with right gripper", + "subtask": "Place the Rubik's Cube on the table with the right gripper", "subtask_index": 8 }, { - "subtask": "Pour water from blue cup to green cup with the left gripper", + "subtask": "Grasp the blue blackboard erasure with the left gripper", "subtask_index": 9 }, { - "subtask": "Grasp the blue cup with the left gripper", + "subtask": "Grasp the shower sphere with the left gripper", "subtask_index": 10 }, { - "subtask": "Pour water into the green plastic cup with right gripper", + "subtask": "Grasp the plush banana with the left gripper", "subtask_index": 11 }, { - "subtask": "Pour water into the blue plastic cup with left gripper", + "subtask": "Grasp the yogurt with the left gripper", "subtask_index": 12 }, { - 
"subtask": "Place the white cup with the right gripper", + "subtask": "Grasp the milk with the left gripper\n", "subtask_index": 13 }, { - "subtask": "Grasp the white cup with the right gripper", + "subtask": "Pass the plush banana to the right gripper", "subtask_index": 14 }, { - "subtask": "Pick up the blue plastic cup with right gripper", + "subtask": "Place the blue blackboard erasure on the table with the right gripper", "subtask_index": 15 }, { - "subtask": "Place the white cup with the left gripper", + "subtask": "Abnormal", "subtask_index": 16 }, { - "subtask": "Place the blue cup with the right gripper", + "subtask": "Grasp the XX with the left gripper", "subtask_index": 17 }, { - "subtask": "Place the gray plastic cup on the table with left gripper", + "subtask": "Grasp the blue blackboard erasure with the left grippe", "subtask_index": 18 }, { - "subtask": "Pick up the green plastic cup with left gripper", + "subtask": "Pass the Rubik's Cube to the right gripper", "subtask_index": 19 }, { - "subtask": "Pour water into the blue plastic cup with right gripper", + "subtask": "Place the milk on the table with the right gripper\n", "subtask_index": 20 }, { - "subtask": "Grasp the green cup with the right gripper", + "subtask": "Pass the long bread to the right gripper\n", "subtask_index": 21 }, { - "subtask": "Pick up the blue plastic cup with left gripper", + "subtask": "\nPass the milk to the right gripper", "subtask_index": 22 }, { - "subtask": "Grasp the white cup with the left gripper", + "subtask": "Pass the long bread to the right gripper", "subtask_index": 23 }, { - "subtask": "Pour water from white cup to green cup with the left gripper", + "subtask": "Grasp the milk with the left gripper", "subtask_index": 24 }, { - "subtask": "Pour water from blue cup to white cup with the right gripper", + "subtask": "Pass the blue blackboard erasure to the right gripper", "subtask_index": 25 }, { - "subtask": "Place the blue plastic cup on the table with right 
gripper", + "subtask": "Place the long bread on the table with the right gripper", "subtask_index": 26 }, { - "subtask": "Pour water from white cup to blue cup with the left gripper", + "subtask": "Grasp the long bread with the left gripper", "subtask_index": 27 }, { - "subtask": "Pour water into the green plastic cup with left gripper", + "subtask": "Grasp the Rubik's Cube with the left gripper", "subtask_index": 28 }, { - "subtask": "Pour water from green cup to blue cup with the right gripper", + "subtask": "Place the yogurt on the table with the right gripper", "subtask_index": 29 }, { - "subtask": "Place the green plastic cup on the table with left gripper", + "subtask": "Place the plush banana on the table with the right gripper", "subtask_index": 30 }, { - "subtask": "Place the green plastic cup on the table with right gripper", + "subtask": "Pass the milk to the right gripper\n", "subtask_index": 31 }, { - "subtask": "Grasp the blue cup with the right gripper", + "subtask": "Place the grape on the table with the right gripper", "subtask_index": 32 }, { - "subtask": "Place the green cup with the right gripper", + "subtask": "Grasp the long bread with the left gripper\n", "subtask_index": 33 }, { - "subtask": "Pour water from green cup to blue cup with the left gripper", + "subtask": "Pass the milk to the right gripper", "subtask_index": 34 }, { - "subtask": "Place the green cup with the left gripper", + "subtask": "Pass the grape to the right gripper", "subtask_index": 35 }, { - "subtask": "End", + "subtask": "Pass the blue garbage bag to the right gripper", "subtask_index": 36 }, { - "subtask": "Place the blue cup with the left gripper", + "subtask": "Pass the xx to the right gripper", "subtask_index": 37 }, { - "subtask": "Pour water from green cup to white cup with the left gripper", + "subtask": "Grasp the grape with the left gripper", "subtask_index": 38 }, { - "subtask": "Right gripper", + "subtask": "Grasp the purple garbage bag with the left 
gripper", "subtask_index": 39 }, { - "subtask": "Pour water into the gray plastic cup with right gripper", + "subtask": "Place the shower sphere on the table with the right gripper", "subtask_index": 40 }, - { - "subtask": "Pour water into the gray plastic cup with left gripper", - "subtask_index": 41 - }, - { - "subtask": "Pour water from green cup to white cup with the right gripper", - "subtask_index": 42 - }, { "subtask": "null", - "subtask_index": 43 + "subtask_index": 41 } ], "atomic_actions": [ "grasp", - "pick", - "place", - "pour" + "lift", + "lower", + "handover", + "takeover" ], "robot_name": [ - "Galaxea_R1_Lite" + "agilex_cobot_magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", 
"eef_direction_annotation.jsonl", @@ -48223,53 +45086,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 42644, + "total_episodes": 97, + "total_frames": 53653, "fps": 30, - "total_tasks": 44, - "total_videos": 392, + "total_tasks": 42, + "total_videos": 291, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "1.43 GB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "636.04 MB" }, - "frame_num": 42644, - "dataset_size": "1.43 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 53653, + "dataset_size": "636.04 MB", + "data_structure": "Agilex_Cobot_Magic_pass_object_left_to_right_khaki_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:97" + "train": "0:96" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -48278,8 +45118,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -48291,7 +45131,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -48301,7 +45141,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -48314,7 +45154,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -48324,7 +45164,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -48337,7 +45177,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -48346,20 +45186,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + 
"left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -48368,14 +45220,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -48415,17 +45279,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -48442,10 +45306,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -48462,130 +45326,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { 
"names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" + ] }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", "shape": [ 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" ], - "dtype": "int32" + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -48607,358 +45471,123 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "R1_Lite_tableware_arrangement": { - "path": "R1_Lite_tableware_arrangement", - "dataset_name": "tableware_arrangement", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Airbot_MMK2_storage_block_both_hands": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" + "language": [ + "en" ], - "tasks": "Put the chopsticks back from the plate", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_block_both_hands", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "chopsticks", - "level1": "tableware", - "level2": "chopsticks", + "object_name": "cream_storage_basket", + "level1": "home_storage", + "level2": "cream_storage_basket", "level3": null, "level4": null, "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the building blocks with both hands simultaneously and put them into the white storage box." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the red block with the left gripper", + "subtask_index": 0 }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null + "subtask": "End", + "subtask_index": 1 }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the orange block with the right gripper", + "subtask_index": 2 }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the red block into the white basket with the left gripper", + "subtask_index": 3 }, { - "object_name": "lunch_box", - "level1": "container", - "level2": "lunch_box", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the orange block into the white basket with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], - "operation_platform_height": null, - "frame_range": "0-16015", - "dataset_size": "628.5MB", - "statistics": { - "total_episodes": 8, - "total_frames": 16015, - "total_tasks": 1, - "total_videos": 24, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "85abe0d0-6c0a-432a-9dd0-14e2ca009399", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Put the chopsticks back from the plate", - "Put the bowl back from the plate", - "Put the spoon back from the bowl", - "Pick the chopsticks and place it on the plate", - "abnormal", - "Pick the bowl and place it on the plate", - "Pick the spoon and place it in the bowl", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": 
"auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_tableware_arrangement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_tableware_arrangement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_hotel_services_ad": { - "path": "leju_robot_hotel_services_ad", - "dataset_name": "hotel_services_ad", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], "atomic_actions": [ "grasp", "pick", "place" ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": "card", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-73346", - "dataset_size": "4.7GB", - "statistics": { - "total_episodes": 430, - "total_frames": 73346, - "total_tasks": 1, - "total_videos": 1290, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "1bbfd926-a183-40a4-8649-0b9958b0ffcf", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Take out the room card with right gripper", - "Hand the room card to the guest with right gripper", - "Hand the room card to the target.", - "Pick up the room card from the card holder.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - 
"contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_ad_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_ad_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Agilex_Cobot_Magic_close_drawer_bottom": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_close_drawer_bottom", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "office_workspace", - "level2": "office", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "three_layer_transparent_drawer", - "level1": "laboratory_supplies", - "level2": "three_layer_transparent_drawer", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "close the bottom drawer." 
- ], - "sub_tasks": [ - { - "subtask": "End", - "subtask_index": 0 - }, - { - "subtask": "Use the right gripper to contact the bottom layer of the storage cabinet", - "subtask_index": 1 - }, - { - "subtask": "Push the bottom drawer closed", - "subtask_index": 2 - }, - { - "subtask": "null", - "subtask_index": 3 - } - ], - "atomic_actions": [ - "grasp", - "push" - ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -48969,23 +45598,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 16270, + "total_episodes": 49, + "total_frames": 3769, "fps": 30, - "total_tasks": 4, - "total_videos": 150, + "total_tasks": 6, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - 
"dataset_size": "157.62 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "104.08 MB" }, - "frame_num": 16270, - "dataset_size": "157.62 MB", - "data_structure": "Agilex_Cobot_Magic_close_drawer_bottom_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 3769, + "dataset_size": "104.08 MB", + "data_structure": "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -49057,10 +45686,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -49069,32 +45721,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -49103,26 +45765,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -49162,17 +45834,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -49189,10 +45861,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + 
"dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -49209,130 +45881,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -49354,21 +45966,22 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_pick_metal_bowl_ab": { - "path": "G1edu-u3_pick_metal_bowl_ab", - "dataset_name": "pick_metal_bowl_ab", + "AIRBOT_MMK2_store_beauty_blender_and_building_blocks": { + "path": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks", + "dataset_name": "store_beauty_blender_and_building_blocks", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", + "place", "pick" ], - "tasks": "Grasp the metal bowl and lift it to the center of the view with left gripper", + "tasks": "Grasp the green cuboid block with the right gripper", "objects": [ { "object_name": "table", @@ -49379,27 +45992,43 @@ "level5": null }, { - "object_name": "metal_bowl", - "level1": "bowl", - "level2": "metal_bowl", + "object_name": "marble", + "level1": "toy", + "level2": "marble", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "building_blocks", + "level1": "toy", + "level2": "building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-11322", - "dataset_size": "158.4MB", + "operation_platform_height": 77.2, + "frame_range": "0-9178", + "dataset_size": "300.5MB", "statistics": { - "total_episodes": 36, - "total_frames": 11322, + "total_episodes": 50, + "total_frames": 9178, "total_tasks": 1, - "total_videos": 36, + "total_videos": 200, "total_chunks": 1, - "chunks_size": 37, + "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": 
"988d81c9-a23a-4a67-9613-aed7fcecf7e3", + "dataset_uuid": "fee911a7-d998-45da-b549-b1621ca063be", "language": [ "en", "zh" @@ -49408,9 +46037,12 @@ "robotics" ], "sub_tasks": [ - "Grasp the metal bowl and lift it to the center of the view with left gripper", + "Grasp the green cuboid block with the right gripper", + "Place the bullet into the bowl with the left gripper", "End", - "Grasp the metal bowl and lift it to the center of the view with right gripper", + "Grasp the bullet with the left gripper", + "Abnormal", + "Place the green cuboid block into the bowl with the right gripper", "null" ], "annotations": { @@ -49448,12 +46080,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": 
"G1edu-u3_pick_metal_bowl_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_metal_bowl_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n 
│ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AgiBot-g1_remove_the_accessory": { - "path": "AgiBot-g1_remove_the_accessory", - "dataset_name": "remove_the_accessory", + "RMC-AIDA-L_place_the_fruits_repeatedly": { + "path": "RMC-AIDA-L_place_the_fruits_repeatedly", + "dataset_name": "place_the_fruits_repeatedly", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -49462,9 +46094,10 @@ "atomic_actions": [ "grasp", "place", - "pick" + "pick", + "pour" ], - "tasks": "Place the hard drive bracket into the box.", + "tasks": "Pick up the bowl with the left gripper", "objects": [ { "object_name": "table", @@ -49475,35 +46108,59 @@ "level5": null }, { - "object_name": "box", + "object_name": "bowl", "level1": "container", - "level2": "box", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "accessories", - "level1": "industrial_parts", - "level2": "accessories", + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-27353", - "dataset_size": "14.8GB", + "operation_platform_height": 77.2, + "frame_range": "0-541540", + "dataset_size": "3.7GB", "statistics": { - "total_episodes": 68, - "total_frames": 27353, - "total_tasks": 1, - "total_videos": 
544, + "total_episodes": 481, + "total_frames": 541540, + "total_tasks": 8, + "total_videos": 1443, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "fcba76dc-2672-4dc1-9b8e-b375ede64077", + "dataset_uuid": "3448d8ea-8209-43cf-bfcc-550863a26b13", "language": [ "en", "zh" @@ -49512,8 +46169,22 @@ "robotics" ], "sub_tasks": [ - "Place the hard drive bracket into the box.", - "Pick up the hard drive bracket from the feeding area.", + "Pick up the bowl with the left gripper", + "Place the bread into the bowl", + "abnormal", + "Pick up the egg yolk pastry with the right gripper", + "Pour the egg yolk pastry out of bowl", + "Pour the orange out of the bowl", + "Place the peach into the bowl", + "Pour the peach out of bowl", + "Place the egg yolk pastry into the bowl", + "Pick up the bread with the right gripper", + "Pick up the peach with the right gripper", + "Pour the bread out of the bowl", + "end", + "Place the bowl on the table", + "Place the orange into the bowl", + "Pick up the orange with the right gripper", "null" ], "annotations": { @@ -49551,12 +46222,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao 
Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_remove_the_accessory_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_remove_the_accessory_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "RMC-AIDA-L_place_the_fruits_repeatedly_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"RMC-AIDA-L_place_the_fruits_repeatedly_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "leju_robot_box_storage_parcel_g": { - "path": "leju_robot_box_storage_parcel_g", - "dataset_name": "box_storage_parcel_g", + "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers": { + "path": "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers", + "dataset_name": "place_the_piano_and_the_needle-nose_pliers", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -49564,10 +46235,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "Place the package into the parcel locker.", + "tasks": "End", "objects": [ { "object_name": "table", @@ -49578,43 +46249,43 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", + "object_name": "hand_tapping_the_qin", + "level1": "toy", + "level2": "hand_tapping_the_qin", "level3": 
null, "level4": null, "level5": null }, { - "object_name": "parcel", + "object_name": "lid", "level1": "container", - "level2": "parcel", + "level2": "lid", "level3": null, "level4": null, "level5": null }, { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", + "object_name": "long-nose_pliers", + "level1": "tool", + "level2": "long-nose_pliers", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-182793", - "dataset_size": "9.4GB", + "operation_platform_height": 77.2, + "frame_range": "0-25318", + "dataset_size": "826.8MB", "statistics": { - "total_episodes": 492, - "total_frames": 182793, + "total_episodes": 49, + "total_frames": 25318, "total_tasks": 1, - "total_videos": 1476, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "1b130f08-2c4e-4724-ad03-8ecda43684fd", + "dataset_uuid": "230992b4-bba2-4f5b-99ce-03e63d148ce1", "language": [ "en", "zh" @@ -49623,10 +46294,13 @@ "robotics" ], "sub_tasks": [ - "Place the package into the parcel locker.", - "Pick up the package from the inbound machine.", - "Pick up the package from the conveyor belt.", - "Place the package onto the inbound machine.", + "End", + "Static", + "Place the tongs on the white lid with the right gripper", + "Grasp the tongs with the right gripper", + "Place the xylophone on the white lid with the left gripper", + "Grasp the xylophone with the left gripper", + "Abnormal", "null" ], "annotations": { @@ -49664,23 +46338,21 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong 
Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_piano_and_the_needle-nose_pliers_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── 
episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_place_the_small_bowl_of_canned_food": { - "path": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food", - "dataset_name": "place_the_small_bowl_of_canned_food", + "AIRBOT_MMK2_take_the_book": { + "path": "AIRBOT_MMK2_take_the_book", + "dataset_name": "take_the_book", "robot_type": "", "end_effector_type": [ "five_finger_hand" ], "scene_type": [], "atomic_actions": [ - "grasp", - "pick", - "place" + "pick" ], - "tasks": "End", + "tasks": "Lay the book down with the right gripper", "objects": [ { "object_name": "table", @@ -49691,35 +46363,35 @@ "level5": null }, { - "object_name": "mini_table", - "level1": "furniture", - "level2": "mini_table", + "object_name": "bookshelf", + "level1": "container", + "level2": "bookshelf", "level3": null, "level4": null, "level5": null }, { - "object_name": "small_bowl_of_canned_food", - "level1": "food", - "level2": "small_bowl_of_canned_food", + "object_name": "book", + "level1": "stationery", + "level2": "book", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-7610", - "dataset_size": "313.5MB", + "frame_range": "0-9314", + "dataset_size": "352.3MB", "statistics": { - "total_episodes": 50, - "total_frames": 7610, - "total_tasks": 1, - "total_videos": 200, + "total_episodes": 79, + "total_frames": 9314, + "total_tasks": 2, + "total_videos": 316, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "a9c7e711-c375-4aa9-8ce2-93c6f016b80d", + "dataset_uuid": "a04264d7-7e81-40cc-af53-0ec87db1f556", "language": [ "en", "zh" @@ -49728,10 +46400,10 @@ "robotics" ], "sub_tasks": [ - "End", + "Lay the book down with the right gripper", + "Hold the book with the right gripper", "Abnormal", - "Place the small bowl of canned food on the table with the right gripper", - "Grasp the small bowl of canned food with right gripper", + "End", "null" ], "annotations": { @@ -49769,15 +46441,15 @@ ], 
"citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n 
├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_take_the_book_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_take_the_book_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "G1edu-u3_pullBowl_storage_bread_unordered_a": { - "path": "G1edu-u3_pullBowl_storage_bread_unordered_a", - "dataset_name": "pullBowl_storage_bread_unordered_a", + "Cobot_Magic_move_the_plate": { + "path": "Cobot_Magic_move_the_plate", + "dataset_name": "move_the_plate", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -49785,7 +46457,7 @@ "pick", "place" ], - "tasks": "Abnormal", + "tasks": "Pick the container that can hold fruits", "objects": [ { "object_name": "table", @@ -49797,34 +46469,129 @@ }, { "object_name": "plate", - "level1": "kitchen_supplies", + "level1": "container", "level2": "plate", "level3": null, "level4": null, "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-25057", + "dataset_size": "418.0MB", + "statistics": { + "total_episodes": 98, + "total_frames": 25057, + "total_tasks": 1, + "total_videos": 294, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 50 + }, + "dataset_uuid": "8d39690e-e415-4f1c-9e39-2c0a83b6fe12", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick the container that can 
hold fruits", + "Move the picked object to the right side of the table", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao 
Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_move_the_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_move_the_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AgiBot-g1_robotic_arm_picks_up_battery": { + "path": "AgiBot-g1_robotic_arm_picks_up_battery", + "dataset_name": "robotic_arm_picks_up_battery", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the power supply on the operating table.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "object_name": "towel", - "level1": "daily_necessities", - "level2": "towel", + "object_name": "carton", + "level1": "laboratory_supplies", + "level2": "carton", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "battery", + "level1": "tool", + "level2": "battery", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-158699", - "dataset_size": "3.3GB", + "operation_platform_height": null, + "frame_range": "0-30215", + "dataset_size": "17.9GB", "statistics": { - "total_episodes": 188, - "total_frames": 158699, + "total_episodes": 56, + "total_frames": 30215, "total_tasks": 1, - "total_videos": 564, + "total_videos": 448, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a49", + "dataset_uuid": "14af3598-fff7-4c0b-bca2-16c57884c70e", "language": [ "en", "zh" @@ -49833,13 
+46600,8 @@ "robotics" ], "sub_tasks": [ - "Abnormal", - "Place the long bread in pink bowl with left hand", - "End", - "Grasp the round bread with left hand", - "Grasp the long bread with left hand", - "Place the round bread in pink bowl with left hand", - "Move the pink bowl to the center of table with right hand", + "Place the power supply on the operating table.", + "Grab and lift the power supply from the large box.", "null" ], "annotations": { @@ -49877,10 +46639,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pullBowl_storage_bread_unordered_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_robotic_arm_picks_up_battery_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_robotic_arm_picks_up_battery_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_swap_cake_pumpkin_plate": { + "Airbot_MMK2_storage_braised_pork_belly_shrimp": { "task_categories": [ "robotics" ], @@ -49910,7 +46672,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_cake_pumpkin_plate", + "dataset_name": "Airbot_MMK2_storage_braised_pork_belly_shrimp", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -49922,25 +46684,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "shrimp", + "level1": "prepared_dishes", + "level2": "shrimp", "level3": null, "level4": null, "level5": null }, { - "object_name": "pumpkin", - "level1": "vegetables", - "level2": "pumpkin", + "object_name": "braised_pork", + "level1": "prepared_dishes", + "level2": "braised_pork", "level3": null, "level4": null, "level5": null }, { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "plate", + "level1": "plates", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -49948,32 +46710,36 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the cake out with left hand and put the pumpkin in with right hand." 
+ "put the braised pork and shrimp on the plate" ], "sub_tasks": [ { - "subtask": "Grasp the cake with the left gripper", + "subtask": "Place the braised pork in brown sauce into the plate with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the pumpkinx with the right gripper", + "subtask": "Grasp the shrimp with the right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place the shrimp into the plate with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the cake on the table with the left gripper", + "subtask": "Grasp the cake from the table and with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the pumpkin into the plate with the right gripper", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the braised pork in brown sauce with the left gripper", "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ @@ -50013,23 +46779,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 13761, + "total_episodes": 38, + "total_frames": 5834, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 7, + "total_videos": 152, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "399.28 MB" + "dataset_size": "277.59 MB" }, - "frame_num": 13761, - "dataset_size": "399.28 MB", - "data_structure": "Airbot_MMK2_swap_cake_pumpkin_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- 
episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 5834, + "dataset_size": "277.59 MB", + "data_structure": "Airbot_MMK2_storage_braised_pork_belly_shrimp_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(26 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:37" }, "features": { "observation.images.cam_head_rgb": { @@ -50383,163 +47149,67 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_pick_crumpled_paper_aa": { - "path": "G1edu-u3_pick_crumpled_paper_aa", - "dataset_name": "pick_crumpled_paper_aa", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" + "Agilex_Cobot_Magic_erase_board_right": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" + "language": [ + "en" ], - "tasks": "End", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_erase_board_right", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "education", + "level2": "school", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "paper", - "level1": "rubbish", - "level2": "paper", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-4485", - "dataset_size": "58.9MB", - "statistics": { - "total_episodes": 13, - "total_frames": 4485, - "total_tasks": 1, - "total_videos": 13, - "total_chunks": 1, - "chunks_size": 13, - "fps": 30 - }, - "dataset_uuid": "b6576ff7-68ba-498d-a50c-c97bad67554e", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Grasp the paper ball and lift it to the center of the view with right gripper  ", - "Grasp the paper ball and lift it to the center of the view with left gripper ", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - 
"name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pick_crumpled_paper_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_crumpled_paper_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Agilex_Cobot_Magic_erase_board_left_side": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_erase_board_left_side", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "education", - "level2": "school", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard", - "level1": "stationery", - "level2": "whiteboard", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_whiteboard_Eraser", - "level1": "stationery", - "level2": "red_whiteboard_Eraser", + "object_name": "whiteboard", + "level1": "stationery", + "level2": "whiteboard", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", "level3": null, "level4": null, "level5": null @@ -50547,47 +47217,43 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "wipe off the handwriting on the whiteboard." + "use the right gripper to pick up the board eraser, wipe the notes on the whiteboard clean, and then put them back in place." 
], "sub_tasks": [ { - "subtask": "Place the eraser with the left gripper", + "subtask": "Right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the eraser with the left gripper", + "subtask": "Wipe off the writing on the board with right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place the board eraser on the right side of board with right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the eraser and wipe the blackboard with the right gripper", + "subtask": "Move the board eraser to the right of the whiteboard with right gripper", "subtask_index": 3 }, { - "subtask": "Place the eraser with the right gripper", + "subtask": "Pick up the board eraser with right gripper", "subtask_index": 4 }, { - "subtask": "Grasp the eraser with the right gripper", + "subtask": "End", "subtask_index": 5 }, - { - "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", "lift", - "lower", - "wipe" + "wipe", + "handover" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -50619,23 +47285,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 101, - "total_frames": 51478, + "total_episodes": 50, + "total_frames": 28998, "fps": 30, - "total_tasks": 8, - "total_videos": 303, + "total_tasks": 7, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "490.77 MB" + "dataset_size": "249.99 MB" }, - "frame_num": 51478, - "dataset_size": "490.77 MB", - "data_structure": "Agilex_Cobot_Magic_erase_board_left_side_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- 
chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 28998, + "dataset_size": "249.99 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:100" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -51006,1015 +47672,562 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_close_drawer": { + "alpha_bot_2_stack_building_blocks": { + "path": "alpha_bot_2_stack_building_blocks", + "dataset_name": "stack_building_blocks", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the blue building block with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "building_blocks", + "level1": "toys", + "level2": "building_blocks", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-130049", + "dataset_size": "3.4GB", + "statistics": { + "total_episodes": 139, + "total_frames": 130049, + "total_tasks": 1, + "total_videos": 556, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "f7781909-81c7-40ee-98a5-67783a48dc66", + "language": [ + "en", + "zh" + ], "task_categories": [ "robotics" ], - "language": [ - "en" + "sub_tasks": [ + "Grasp the blue building block with left gripper", + "End", + "Place the pink building block in the center of table with left gripper", + "Grasp the pink building block with left gripper", + "Place the blue building block on the pink building blocks with left 
gripper", + "Grasp the green building block with left gripper", + "Place the pink building block on the blue building blocks with right gripper", + "Place the green block on the pink block with left gripper", + "Place the blue building block on the blue building blocks with right gripper", + "Place the blue building block on the pink building blocks with right gripper", + "Grasp the blue building block with right gripper", + "Place the pink building block on the blue building blocks with left gripper", + "Place the blue building block on the blue building blocks with left gripper", + "Grasp the pink building block with right gripper", + "Place the pink block on the blue block with right gripper", + "Place the blue building block in the center of table with left gripper", + "null" ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, 
Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_stack_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_stack_building_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_catch_the_ball": { + "path": "Cobot_Magic_catch_the_ball", + "dataset_name": "catch_the_ball", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "extra_gated_prompt": "By accessing this 
dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_close_drawer", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "kitchen", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Put the picked-up object on the table.", "objects": [ { - "object_name": "layer_transparent_drawer", - "level1": "storage_utensils", - "level2": "layer_transparent_drawer", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "close the top drawer by hand." 
- ], - "sub_tasks": [ - { - "subtask": "End", - "subtask_index": 0 - }, - { - "subtask": "Close the top drawer with the right hand", - "subtask_index": 1 }, { - "subtask": "null", - "subtask_index": 2 + "object_name": "ball", + "level1": "toy", + "level2": "ball", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "push" + "operation_platform_height": 77.2, + "frame_range": "0-57085", + "dataset_size": "877.6MB", + "statistics": { + "total_episodes": 98, + "total_frames": 57085, + "total_tasks": 1, + "total_videos": 294, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 50 + }, + "dataset_uuid": "3ea12ae6-b4f3-453d-b56f-bca0e5a346c8", + "language": [ + "en", + "zh" ], - "robot_name": [ - "Airbot_MMK2" + "task_categories": [ + "robotics" ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "sub_tasks": [ + "Put the picked-up object on the table.", + "Grab the rolling spherical object.", + "Grab the spherical object with your left arm.", + "Place the sphere on the white object.", + "null" ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - 
"annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" + "data_schema": "Cobot_Magic_catch_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_catch_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── 
videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_open_the_shoebox": { + "path": "Cobot_Magic_open_the_shoebox", + "dataset_name": "open_the_shoebox", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pull" + ], + "tasks": "Open the shoe box", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + } ], + "operation_platform_height": 77.2, + "frame_range": "0-185740", + "dataset_size": "2.8GB", "statistics": { - "total_episodes": 50, - "total_frames": 7377, - "fps": 30, + "total_episodes": 299, + "total_frames": 185740, "total_tasks": 3, - "total_videos": 200, + "total_videos": 897, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "217.36 MB" - }, - "frame_num": 7377, - "dataset_size": "217.36 MB", - "data_structure": "Airbot_MMK2_close_drawer_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- 
chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:49" + "fps": 50 }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - 
"channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - 
"left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], 
- "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } + "dataset_uuid": "1708df8d-97e4-42b0-a8ee-de1bb93b4d77", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Open the shoe box", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, 
Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "Agilex_Cobot_Magic_storage_bread_basket": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], + "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, 
Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_open_the_shoebox_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_open_the_shoebox_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_pour_rice": { + "path": "RMC-AIDA-L_pour_rice", + "dataset_name": "pour_rice", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_bread_basket", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" + ], + "tasks": "Pick up the cup with rice in it with the left gripper", "objects": [ { "object_name": "table", - "level1": "home_storage", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "small_yellow _basket", - "level1": "home_storage", - "level2": "small_yellow _basket", + "object_name": "rice", + "level1": "food", + "level2": "rice", "level3": null, "level4": null, "level5": null }, { - "object_name": "waffle", - "level1": "food", - "level2": "waffle", + "object_name": "grain_cup", + "level1": "container", + "level2": "grain_cup", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "Put the bakery in the basket." 
- ], - "sub_tasks": [ - { - "subtask": "Abnormal", - "subtask_index": 0 - }, - { - "subtask": "Grasp the bread with right gripper", - "subtask_index": 1 }, { - "subtask": "End", - "subtask_index": 2 - }, - { - "subtask": "Grasp the bread with left gripper", - "subtask_index": 3 - }, - { - "subtask": "Place the bread in the basket with right gripper", - "subtask_index": 4 - }, - { - "subtask": "Place the bread in the basket with left gripper", - "subtask_index": 5 - }, - { - "subtask": "null", - "subtask_index": 6 + "object_name": "rice_cooker", + "level1": "home_appliances", + "level2": "rice_cooker", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "grasp", - "lift", - "lower" - ], - "robot_name": [ - "Agilex_Cobot_Magic" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], + "operation_platform_height": 77.2, + "frame_range": "0-413739", + "dataset_size": "2.9GB", "statistics": { - "total_episodes": 98, - "total_frames": 32042, - "fps": 30, - "total_tasks": 7, - "total_videos": 294, + 
"total_episodes": 714, + "total_frames": 413739, + "total_tasks": 2, + "total_videos": 2142, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "354.51 MB" - }, - "frame_num": 32042, - "dataset_size": "354.51 MB", - "data_structure": "Agilex_Cobot_Magic_storage_bread_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:97" + "fps": 30 }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "dataset_uuid": "c79b49bf-cadf-4094-831d-bbf6403956c6", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick up the cup with rice in it with the left gripper", + "End", + "Move the bowl in the center of view with left gripper", + "Pick up the bowl with the left gripper", + "Place the cup on the table with the right gripper", + "Pour the rice from the cup into the bowl with the left gripper", + "Place the bowl in the center of view with the right gripper", + "Static", + "Place the cup on the table with the left gripper", + "Pour the rice from the cup into the bowl with the right gripper", + "Pick up the cup with rice in it with the right gripper", + "Pick up the bowl with the right gripper", + "Place the bowl in the center of view with the left gripper", + "Grasp the cup with rice in it with right gripper", + "abnormal", + "null" + ], + 
"annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen 
Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_bowl_storage_pepper": { + "path": "AIRBOT_MMK2_bowl_storage_pepper", + "dataset_name": "bowl_storage_pepper", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the white bowl on the pink bowl with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - 
"right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - 
"right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "chili_pepper", + "level1": "vegetables", + "level2": "chili_pepper", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_action": { - "names": [ - 
"left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + { + "object_name": "pumpkin", + "level1": "vegetables", + "level2": "pumpkin", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-19594", + "dataset_size": "715.7MB", + "statistics": { + "total_episodes": 150, + "total_frames": 19594, + "total_tasks": 3, + "total_videos": 600, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "83f67fd9-c480-4786-ae1d-41475a4f4618", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the white bowl on the pink bowl with the left gripper", + "Grasp the white bowl with the left gripper", + "Static", + "End", + "Abnormal", + "Grasp the pumpkin with the left gripper", + "Place the pumpkin on the pink bowl with the left gripper", + "Grasp the yellow round chili pepper with the right gripper", + "Place yellow round chili pepper on the blue bowl with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - 
"contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + 
], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_bowl_storage_pepper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_bowl_storage_pepper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── 
episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "alpha_bot_2_item_reversal": { - "path": "alpha_bot_2_item_reversal", - "dataset_name": "item_reversal", + "R1_Lite_move_the_position_of_the_cookie": { + "path": "R1_Lite_move_the_position_of_the_cookie", + "dataset_name": "move_the_position_of_the_cookie", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", + "place", "pick", - "place" + "grasp" ], - "tasks": "Grasp the cup with left gripper", + "tasks": "Place the cookie on the table with right gripper", "objects": [ { "object_name": "table", @@ -52025,27 +48238,355 @@ "level5": null }, { - "object_name": "cup", - "level1": "kitchen_supplies", - "level2": "cup", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "container", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { 
+ "object_name": "peeler", + "level1": "tool", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "towel", + "level1": "clothing", + "level2": "towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, 
+ "level4": null, + "level5": null + }, + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": 
null + }, + { + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-82235", - "dataset_size": "2.1GB", + "operation_platform_height": 77.2, + "frame_range": "0-5045", + "dataset_size": "198.3MB", "statistics": { - "total_episodes": 90, - "total_frames": 82235, + "total_episodes": 25, + "total_frames": 5045, "total_tasks": 1, - "total_videos": 360, + "total_videos": 100, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "aac569d8-dbc7-4128-a573-659adbe0ed51", + "dataset_uuid": "7015670f-e702-4c08-a662-e74aee0c0872", "language": [ "en", "zh" @@ -52054,10 +48595,13 @@ "robotics" ], "sub_tasks": [ - "Grasp the cup with left gripper", - "Place the cup on the table with right gripper", - "Pass the cup to the right gripper", + "Place the cookie on the table with right gripper", + "Static", + "Place the cookie on the table with left gripper", "End", + "Grasp the cookie with right gripper", + "Grasp the cookie with left gripper", + "Abnormal", "null" ], "annotations": { @@ -52095,10 +48639,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, 
Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "alpha_bot_2_item_reversal_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_item_reversal_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_move_the_position_of_the_cookie_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_cookie_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_egg": { + "Airbot_MMK2_move_tub": { "task_categories": [ "robotics" ], @@ -52128,11 +48672,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_egg", + "dataset_name": "Airbot_MMK2_move_tub", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "catering", - "level2": "restaurant", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -52140,17 +48684,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "egg_carton", - "level1": "disposable_items", - "level2": "egg_carton", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg", - "level1": "eggs", - "level2": "egg", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -52158,24 +48694,28 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take out the brown eggs from the egg box." + "pass the plate to your right hand with your left hand and then put it down." 
], "sub_tasks": [ { - "subtask": "End", + "subtask": "Grasp the white box with left gripper", "subtask_index": 0 }, { - "subtask": "Place the egg on the table with the right gripper", + "subtask": "Place the white box in the center of table with right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the egg from the egg storage box with the right gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Pass the white box to right gripper", "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 } ], "atomic_actions": [ @@ -52215,23 +48755,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 101, - "total_frames": 18643, + "total_episodes": 37, + "total_frames": 11634, "fps": 30, - "total_tasks": 4, - "total_videos": 404, + "total_tasks": 5, + "total_videos": 148, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "758.58 MB" + "dataset_size": "340.31 MB" }, - "frame_num": 18643, - "dataset_size": "758.58 MB", - "data_structure": "Airbot_MMK2_take_egg_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 11634, + "dataset_size": "340.31 MB", + "data_structure": "Airbot_MMK2_move_tub_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(25 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:100" + "train": "0:36" }, "features": { "observation.images.cam_head_rgb": { @@ -52585,139 +49125,554 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_steamer_storage_dumpling": { - "path": "Cobot_Magic_steamer_storage_dumpling", - "dataset_name": "steamer_storage_dumpling", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Agilex_Cobot_Magic_erase_board_passing_left_to_right": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" + "language": [ + "en" ], - "tasks": "use the left arm to put the dumpling into the steamer", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_erase_board_passing_left_to_right", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "education", + "level2": "school", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "dumplings", - "level1": "food", - "level2": "dumplings", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "whiteboard", + "level1": "stationery", + "level2": "whiteboard", "level3": null, "level4": null, "level5": null }, { - "object_name": "steamer", - "level1": "container", - "level2": "steamer", + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-417589", - "dataset_size": "19.8GB", - "statistics": { - "total_episodes": 580, - "total_frames": 417589, - "total_tasks": 6, - "total_videos": 1740, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "df5bb1ee-1fd7-4b3d-8502-4cda9ee6bbc5", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the 
operation type information.", + "task_instruction": [ + "use the left gripper to place the eraser in the right, use the right gripper to pick up the eraser, wipe the notes on the whiteboard clean, and then put it down." ], "sub_tasks": [ - "use the left arm to put the dumpling into the steamer", - "Abnormal", - "use the right arm to put the dumpling into the steamer", - "Grasp the dumpling with left gripper", - "End", - "Place the dumpling on the steamer with right gripper", - "use the left arm to grab a dumpling", - "Grasp the dumpling with right gripper", - "use the right arm to grab a dumpling", - "Place the dumpling on the steamer with left gripper", - "null" + { + "subtask": "Right gripper", + "subtask_index": 0 + }, + { + "subtask": "Move the board eraser to the center of the whiteboard with left gripper", + "subtask_index": 1 + }, + { + "subtask": "Wipe off the writing on the board with right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the board eraser on the right side of board with right gripper", + "subtask_index": 3 + }, + { + "subtask": "Move the board eraser to the right of the whiteboard with right gripper", + "subtask_index": 4 + }, + { + "subtask": "Pick up the board eraser with right gripper", + "subtask_index": 5 + }, + { + "subtask": "Move the board eraser to the right of the whiteboard with left gripper", + "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "Left gripper", + "subtask_index": 8 + }, + { + "subtask": "null", + "subtask_index": 9 + } ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "atomic_actions": [ + "grasp", + "lift", + "wipe", + "handover" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + 
"tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 38393, + "fps": 30, + "total_tasks": 10, + "total_videos": 150, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "335.24 MB" + }, + "frame_num": 38393, + "dataset_size": "335.24 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_passing_left_to_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + 
"right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + 
"eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + 
"dtype": "float32", + "shape": [ + 2 + ] + } }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_steamer_storage_dumpling_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_steamer_storage_dumpling_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this 
dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_plate_storaje_baozi": { - "path": "Cobot_Magic_plate_storaje_baozi", - "dataset_name": "plate_storaje_baozi", + "G1edu-u3_put_the_lemon_af": { + "path": "G1edu-u3_put_the_lemon_af", + "dataset_name": "put_the_lemon_af", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "place", - "pick" + "place" ], - "tasks": "Grasp the baozi on the steamer with left gripper", + "tasks": "End", "objects": [ { "object_name": "table", @@ -52728,43 +49683,27 @@ "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "baozi", - "level1": "food", - "level2": "baozi", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "steamer", - "level1": "container", - "level2": "steamer", + "object_name": "lemon", + "level1": "fruits", + "level2": "lemon", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-261093", - "dataset_size": "12.3GB", + "frame_range": "0-3544", + "dataset_size": "37.1MB", "statistics": { - "total_episodes": 495, - "total_frames": 261093, - "total_tasks": 6, - "total_videos": 1485, + "total_episodes": 19, + "total_frames": 3544, + "total_tasks": 1, + "total_videos": 19, "total_chunks": 1, - "chunks_size": 1000, + "chunks_size": 19, "fps": 30 }, - "dataset_uuid": "749fc774-003a-487c-948b-b6fc7ad307e1", + "dataset_uuid": "c37586ed-bc01-40b2-9038-cd26ce06df5b", "language": [ "en", "zh" @@ -52773,22 
+49712,9 @@ "robotics" ], "sub_tasks": [ - "Grasp the baozi on the steamer with left gripper", - "Abnormal", - "Place the yellow baozi on the plate with left gripper", - "Place the yellow baozi on the plate with right gripper", "End", - "Grasp the dumpling with left gripper", - "use the left arm to grab the steamed stuffed bun that is closest to it", - "use the right arm to grab the steamed stuffed bun that is closest to it", - "use the right arm to put the steamed stuffed bun into the plate", - "Place the baozi on the plate with left gripper", - "Grasp the yellow baozi on the steamer with left gripper", - "use the left arm to put the steamed stuffed bun into the plate", - "Place the dumpling on the steamer with right gripper", - "Grasp the dumpling with right gripper", - "Grasp the yellow baozi on the steamer with right gripper", - "Place the dumpling on the steamer with left gripper", + "Place the lemon on the table with right gripper", + "Place the lemon on the table with left gripper", "null" ], "annotations": { @@ -52826,10 +49752,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, 
Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_plate_storaje_baozi_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_plate_storaje_baozi_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_put_the_lemon_af_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_put_the_lemon_af_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_apple_orange": { + "Airbot_MMK2_take_drink": { "task_categories": [ "robotics" ], @@ -52859,11 +49785,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_apple_orange", + "dataset_name": "Airbot_MMK2_take_drink", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", + "level1": "convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -52871,25 +49797,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "apple", - "level1": "fruits", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruits", - "level2": "orange", + "object_name": "any_beverages", + "level1": "beverages", + "level2": "any_beverages", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "paper_cup", + "level1": "cups", + "level2": "paper_cup", "level3": null, "level4": null, "level5": null @@ -52897,32 +49815,48 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the apple with left hand and put it in the storage box, and pick up the orange with right hand and put it in the storage box." + "pick up the drink by hand and put it on the table." 
], "sub_tasks": [ { - "subtask": "Grasp the orange with the right gripper", + "subtask": "Place the coffee on the table with the left gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the vitamin B water on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the orange into the right compartment of the storage box with the right gripper", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Grasp the apple with the left gripper", + "subtask": "Place the vitamin B water on the table with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the apple into the left compartment of the storage box with the left gripper", + "subtask": "Place the coffee on the table with the right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the vitamin B water on the white lid with the left gripper", "subtask_index": 5 + }, + { + "subtask": "Grasp the vitamin B water on the white lid with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "Grasp the coffee on the white lid with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "null", + "subtask_index": 9 } ], "atomic_actions": [ @@ -52962,23 +49896,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 41, - "total_frames": 6657, + "total_episodes": 86, + "total_frames": 17345, "fps": 30, - "total_tasks": 6, - "total_videos": 164, + "total_tasks": 10, + "total_videos": 344, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "176.34 MB" + "dataset_size": "570.83 MB" }, - "frame_num": 6657, - "dataset_size": "176.34 MB", - "data_structure": "Airbot_MMK2_storage_apple_orange_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 17345, + "dataset_size": "570.83 MB", + "data_structure": "Airbot_MMK2_take_drink_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(74 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:40" + "train": "0:85" }, "features": { "observation.images.cam_head_rgb": { @@ -53332,20 +50266,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_move_the_position_of_the_milk": { - "path": "R1_Lite_move_the_position_of_the_milk", - "dataset_name": "move_the_position_of_the_milk", + "Cobot_Magic_place_square_pyramid": { + "path": "Cobot_Magic_place_square_pyramid", + "dataset_name": "place_square_pyramid", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "place", + "grasp", "pick", - "grasp" + "place" ], - "tasks": "Grasp the wangzai milk with left gripper", + "tasks": "Place the block onto the cube-shaped block", "objects": [ { "object_name": "table", @@ -53356,355 +50290,35 @@ "level5": null }, { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", 
- "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", + "object_name": "cube_block", "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - 
"level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", + "level2": "cube_block", "level3": null, "level4": null, "level5": null }, { - "object_name": "rubiks_cube", + "object_name": "square_pyramid", "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - 
{ - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "level2": "square_pyramid", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-12898", - "dataset_size": "421.8MB", + "frame_range": "0-25851", + "dataset_size": "521.2MB", "statistics": { - "total_episodes": 59, - "total_frames": 12898, + "total_episodes": 99, + "total_frames": 25851, "total_tasks": 1, - "total_videos": 236, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "945013f7-9a63-4c59-af10-4ad4df23d67f", + "dataset_uuid": "53a89b6b-14be-4a0e-8495-f3310b2ba5bd", "language": [ "en", "zh" @@ -53713,13 +50327,10 @@ "robotics" ], "sub_tasks": [ - "Grasp the wangzai milk with left gripper", - "Place the wangzai milk on the table with right gripper", - "Static", - "Place the wangzai milk on the table with left gripper", - "Grasp the wangzai milk with right gripper", + "Place the block onto the cube-shaped block", + 
"Place the triangular block onto the cube block", "End", - "Abnormal", + "Grasp the triangular block", "null" ], "annotations": { @@ -53757,12 +50368,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_milk_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_milk_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_place_square_pyramid_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_place_square_pyramid_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── 
videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_place_the_dress_shirt_on_the_hanger": { - "path": "R1_Lite_place_the_dress_shirt_on_the_hanger", - "dataset_name": "place_the_dress_shirt_on_the_hanger", + "alpha_bot_2_carry_the_clothes_basket": { + "path": "alpha_bot_2_carry_the_clothes_basket", + "dataset_name": "carry_the_clothes_basket", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -53773,54 +50384,30 @@ "pick", "place" ], - "tasks": "abnormal", + "tasks": "Move the basket above the other table", "objects": [ { - "object_name": "hanger", - "level1": "furniture", - "level2": "hanger", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "shirt", - "level1": "clothing", - "level2": "shirt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", + "object_name": "clothes_basket", "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bed", - "level1": "furniture", - "level2": "bed", + "level2": "clothes_basket", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-120516", - "dataset_size": "7.1GB", + "frame_range": "0-46527", + "dataset_size": "2.5GB", "statistics": { - "total_episodes": 103, - "total_frames": 120516, + "total_episodes": 50, + "total_frames": 46527, "total_tasks": 1, - "total_videos": 309, + 
"total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "99bbe00a-c986-419f-b64a-51eef829a3b8", + "dataset_uuid": "0df48917-c10a-469f-8f74-d57f284c09cd", "language": [ "en", "zh" @@ -53829,12 +50416,14 @@ "robotics" ], "sub_tasks": [ - "abnormal", - "Hook the other side of the shirt onto the hanger", - "Hook one side of the shirt onto the hanger", - "Lift the shirt and hanger", - "Take the clothes hanger out of the clothes", - "Place the hanger on the bed", + "Move the basket above the other table", + "Grasp the basket with both grippers", + "Grasp the basket with left gripper", + "Grasp the basket with right gripper", + "End", + "Place the basket on the table with right gripper", + "Place the basket on the table with both grippers", + "Place the basket on the table with left gripper", "null" ], "annotations": { @@ -53872,42 +50461,125 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, 
Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_place_the_dress_shirt_on_the_hanger_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_place_the_dress_shirt_on_the_hanger_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── 
chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "alpha_bot_2_carry_the_clothes_basket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"alpha_bot_2_carry_the_clothes_basket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_take_and_put_away_garden_stuff_a": { - "path": "R1_Lite_take_and_put_away_garden_stuff_a", - "dataset_name": "take_and_put_away_garden_stuff_a", + "AIRBOT_MMK2_place_the_basin": { + "path": "AIRBOT_MMK2_place_the_basin", + "dataset_name": "place_the_basin", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", "pick", - "place", - "pull", - "push" + "place" ], - "tasks": "Grasp the apple and place it on the table with right gripper", + "tasks": "Static", "objects": [ { - "object_name": "refrigerator", + "object_name": "table", "level1": "furniture", - 
"level2": "refrigerator", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "drawer", + "object_name": "basin", "level1": "container", - "level2": "drawer", + "level2": "basin", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-7672", + "dataset_size": "272.9MB", + "statistics": { + "total_episodes": 50, + "total_frames": 7672, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "14a514fa-2a43-4b79-bcf6-c7d01e427623", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Static", + "Place the basin on the table with the right gripper", + "Grasp the basin with the right gripper", + "End", + "Lift the basin with the left gripper ", + "Grasp the basin with the left gripper ", + "Lift the basin with the right gripper", + "Place the basin on the table with the left gripper ", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan 
Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_place_the_basin_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ 
├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_basin_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_basket_storage_apple_b": { + "path": "G1edu-u3_basket_storage_apple_b", + "dataset_name": 
"basket_storage_apple_b", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "pick", + "place" + ], + "tasks": "End", + "objects": [ { "object_name": "table", "level1": "furniture", @@ -53917,67 +50589,35 @@ "level5": null }, { - "object_name": "tray", - "level1": "container", - "level2": "tray", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato", - "level1": "food", - "level2": "potato", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_pepper", - "level1": "food", - "level2": "green_pepper", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pumpkin", - "level1": "food", - "level2": "pumpkin", + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { "object_name": "apple", - "level1": "food", + "level1": "fruit", "level2": "apple", "level3": null, "level4": null, "level5": null - }, - { - "object_name": "orange", - "level1": "food", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null } ], "operation_platform_height": null, - "frame_range": "0-355644", - "dataset_size": "15.9GB", + "frame_range": "0-118352", + "dataset_size": "2.3GB", "statistics": { - "total_episodes": 148, - "total_frames": 355644, + "total_episodes": 241, + "total_frames": 118352, "total_tasks": 1, - "total_videos": 444, + "total_videos": 723, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "7c763184-bc85-4d7e-b01b-85ba85cbb6f2", + "dataset_uuid": "8c0ab1ee-eb0f-48a0-a7be-ff698d2f4651", "language": [ "en", "zh" @@ -53986,39 +50626,9 @@ "robotics" ], "sub_tasks": [ - "Grasp the apple and place it on the table with right gripper", - "Place the green bell pepper in the lower drawer with right gripper", - "Put green pepper in the refrigerator drawer", - "Place the orange in the lower drawer with right gripper", - 
"Place the potato in the lower drawer with left gripper", - "Take out the potato and place it on the table", - "Open the refrigerator door with left gripper", - "Place the potato in the lower drawer with right gripper", - "Take out the apple and place it on the tray", - "Open the refrigerator door", "End", - "Close the refrigerator door", - "Close the lower drawer with the left gripper", - "Grasp the potato and place it on the table with left gripper", - "Open the lower refrigerator drawer", - "Close the middle refrigerator door with the right gripper", - "Place the apple in the lower drawer with right gripper", - "Put orange in the refrigerator drawer", - "Abnormal", - "Put pumpkin in the refrigerator drawer", - "Grasp the potato and place it on the table with right gripper", - "Close the lower refrigerator drawer", - "Place the green bell pepper in the lower drawer with left gripper", - "Put apple in the refrigerator drawer", - "Take out the pumpkin and place it on the table", - "Close the lower drawer with the right gripper", - "Grasp the orange and place it on the table with right gripper", - "Put potato in the refrigerator drawer", - "Take out the green pepper and place it on the table", - "Take out the orange and place it on the tray", - "Grasp the green bell pepper and place it on the table with right gripper", - "Take out the apple and place it on the table", - "Open the lower drawer with the right gripper", + "Place the grapes in the plate with left hand", + "Grasp the grapes with left hand", "null" ], "annotations": { @@ -54056,10 +50666,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan 
Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_take_and_put_away_garden_stuff_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_take_and_put_away_garden_stuff_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_basket_storage_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_basket_storage_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_tomato_potato": { + "Galaxea_R1_Lite_classify_object_six": { "task_categories": [ "robotics" ], @@ -54089,11 +50699,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_tomato_potato", + "dataset_name": "Galaxea_R1_Lite_classify_object_six", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": 
"kitchen", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -54101,25 +50711,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "potato", - "level1": "vegetables", - "level2": "potato", + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "tomato", + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_fruits", + "level1": "fruits", + "level2": "any_fruits", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_vegetables", "level1": "vegetables", - "level2": "tomato", + "level2": "any_vegetables", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_bread", + "level1": "bread", + "level2": "any_bread", "level3": null, "level4": null, "level5": null @@ -54127,32 +50761,300 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the potato with left hand and put it in the storage box, and pick up the tomato with right hand and put it in the storage box." + "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
], "sub_tasks": [ { - "subtask": "Grasp the tomato with the right gripper", + "subtask": "Grasp the round bread and put it in the right basket", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the small basket containing bread and put it in the right basket", "subtask_index": 1 }, { - "subtask": "Place the potato into the left compartment of the storage box with the left gripper", + "subtask": "Grasp the lemon and put it in the right basket", "subtask_index": 2 }, { - "subtask": "Grasp the potato with the left gripper", + "subtask": "Grasp the egg yolk pastry and put it in the right basket", "subtask_index": 3 }, { - "subtask": "Place the tomato into the right compartment of the storage box with the right gripper", + "subtask": "Grasp the can and put it in the right basket", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the apple and put it in the right basket", "subtask_index": 5 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 6 + }, + { + "subtask": "Grasp the canned cola and put it in the right basket", + "subtask_index": 7 + }, + { + "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask_index": 8 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 9 + }, + { + "subtask": "Grasp the peeler and put it in the left basket", + "subtask_index": 10 + }, + { + "subtask": "Grasp the cookie and put it in the right basket", + "subtask_index": 11 + }, + { + "subtask": "Grasp the yellow duck and put it in the right basket", + "subtask_index": 12 + }, + { + "subtask": "Grasp the cleaning agent and put it in the left basket", + "subtask_index": 13 + }, + { + "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask_index": 14 + }, + { + "subtask": "Grasp the waffle and put it in the right basket", + "subtask_index": 15 + }, + { + "subtask": "Grasp the grey towel and put it in the left basket", + 
"subtask_index": 16 + }, + { + "subtask": "Grasp the orange and put it in the right basket", + "subtask_index": 17 + }, + { + "subtask": "Grasp the brown towel and put it in the left basket", + "subtask_index": 18 + }, + { + "subtask": "Grasp the pen container and put it in the left basket", + "subtask_index": 19 + }, + { + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 20 + }, + { + "subtask": "Grasp the canned cola and put it in the left basket", + "subtask_index": 21 + }, + { + "subtask": "Grasp the triangle cake and put it in the right basket", + "subtask_index": 22 + }, + { + "subtask": "Grasp the shower sphere and put it in the left basket", + "subtask_index": 23 + }, + { + "subtask": "Grasp the rubiks cube and put it in the right basket", + "subtask_index": 24 + }, + { + "subtask": "Grasp the broom and put it in the left basket", + "subtask_index": 25 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + "subtask_index": 26 + }, + { + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 27 + }, + { + "subtask": "Grasp the lime and put it in the right basket", + "subtask_index": 28 + }, + { + "subtask": "Grasp the brown towel and put it in the right basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the duck toys and put it in the left basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the power strip and put it in the left basket", + "subtask_index": 31 + }, + { + "subtask": "Grasp the red duck and put it in the left basket", + "subtask_index": 32 + }, + { + "subtask": "Grasp the shampoo and put it in the left basket", + "subtask_index": 33 + }, + { + "subtask": "Grasp the yellow duck and put it in the left basket", + "subtask_index": 34 + }, + { + "subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 35 + }, + { + "subtask": "Abnormal", + "subtask_index": 36 + }, + { + "subtask": "Grasp the tea cup and put it in the left 
basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the pink marker and put it in the left basket", + "subtask_index": 38 + }, + { + "subtask": "Grasp the brush and put it in the left basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 40 + }, + { + "subtask": "Place the rubiks cube in the center of the table", + "subtask_index": 41 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the grey towel and put it in the right basket", + "subtask_index": 43 + }, + { + "subtask": "Grasp the blue marker and put it in the left basket", + "subtask_index": 44 + }, + { + "subtask": "Grasp the black glass cup and put it in the left basket", + "subtask_index": 45 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 46 + }, + { + "subtask": "Grasp the coke and put it in the right basket", + "subtask_index": 47 + }, + { + "subtask": "Grasp the potato chips and put it in the right basket", + "subtask_index": 48 + }, + { + "subtask": "Grasp the ballpoint pen and put it in the left basket", + "subtask_index": 49 + }, + { + "subtask": "Grasp the rubiks cube and put it in the left basket", + "subtask_index": 50 + }, + { + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 51 + }, + { + "subtask": "Grasp the glasses case and put it in the right basket", + "subtask_index": 52 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 53 + }, + { + "subtask": "Grasp the ad milk and put it in the right basket", + "subtask_index": 54 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 55 + }, + { + "subtask": "Grasp the peach doll and put it in the right basket", + "subtask_index": 56 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 57 + }, 
+ { + "subtask": "Grasp the blue marker and put it in the right basket", + "subtask_index": 58 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 59 + }, + { + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 60 + }, + { + "subtask": "Grasp the chocolate and put it in the right basket", + "subtask_index": 61 + }, + { + "subtask": "Grasp the compass and put it in the right basket", + "subtask_index": 62 + }, + { + "subtask": "End", + "subtask_index": 63 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 64 + }, + { + "subtask": "Grasp the peeler and put it in the right basket", + "subtask_index": 65 + }, + { + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 66 + }, + { + "subtask": "Grasp the green lemon and put it in the right basket", + "subtask_index": 67 + }, + { + "subtask": "Grasp the black marker and put it in the left basket", + "subtask_index": 68 + }, + { + "subtask": "Grasp the round chewing gum and put it in the right basket", + "subtask_index": 69 + }, + { + "subtask": "Grasp the bread slice and put it in the right basket", + "subtask_index": 70 + }, + { + "subtask": "Grasp the square chewing gum and put it in the left basket", + "subtask_index": 71 + }, + { + "subtask": "null", + "subtask_index": 72 } ], "atomic_actions": [ @@ -54161,21 +51063,21 @@ "place" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - 
"cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -54192,30 +51094,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 6005, + "total_episodes": 105, + "total_frames": 86401, "fps": 30, - "total_tasks": 6, - "total_videos": 188, + "total_tasks": 73, + "total_videos": 420, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "161.13 MB" + "dataset_size": "5.25 GB" }, - "frame_num": 6005, - "dataset_size": "161.13 MB", - "data_structure": "Airbot_MMK2_storage_tomato_potato_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 86401, + "dataset_size": "5.25 GB", + "data_structure": "Galaxea_R1_Lite_classify_object_six_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(93 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:46" + "train": "0:104" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -54224,8 +51126,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -54234,11 +51136,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -54247,8 +51149,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -54257,11 +51159,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -54270,8 +51172,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -54280,11 +51182,11 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -54293,8 +51195,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + 
"video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -54306,7 +51208,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -54321,36 +51223,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -54365,30 +51245,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -54539,6 +51397,66 @@ 2 ], 
"dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -54560,9 +51478,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_arrange_baai_then_brain": { + "Agilex_Cobot_Magic_storage_orange_basket_left": { "task_categories": [ "robotics" ], @@ -54592,11 +51510,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_arrange_baai_then_brain", + "dataset_name": "Agilex_Cobot_Magic_storage_orange_basket_left", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "eduction", - "level2": "school", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -54604,87 +51522,81 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - 
"object_name": "early_education_toys", - "level1": "buiding_blocks", - "level2": "early_education_toys", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, - "level5:operation_platform_height": 77.2 + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use the gripper to find blocks with the letters b, a, a, and i on the table and arrange them into BAAI, then find r and N and turn the arranged baai into brian." + "left gripper grabs oranges and puts them in the basket." ], "sub_tasks": [ { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Place the second block A in the third location", + "subtask": "Place the orange in the basket with left gripper", "subtask_index": 1 }, { - "subtask": "Place the block I in the fourth location", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Place the first block A in the second location", + "subtask": "Grasp the orange with left gripper", "subtask_index": 3 }, - { - "subtask": "Place the block B in the first location", - "subtask_index": 4 - }, - { - "subtask": "End", - "subtask_index": 5 - }, - { - "subtask": "Move the block A out of the second location", - "subtask_index": 6 - }, - { - "subtask": "Place the block N in the fifth location", - "subtask_index": 7 - }, - { - "subtask": "Place the block R in the second location", - "subtask_index": 8 - }, { "subtask": "null", - "subtask_index": 9 + "subtask_index": 4 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower" ], "robot_name": [ - "Galaxea_R1_Lite" + 
"Agilex_Cobot_Magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -54695,53 +51607,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 56, - "total_frames": 44471, + "total_episodes": 99, + "total_frames": 40181, "fps": 30, - "total_tasks": 10, - "total_videos": 224, + "total_tasks": 5, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "1.26 GB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "488.80 MB" }, - "frame_num": 44471, - "dataset_size": "1.26 GB", - "data_structure": 
"Galaxea_R1_Lite_Galaxea_R1_Lite_arrange_baai_then_brain_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (44 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 40181, + "dataset_size": "488.80 MB", + "data_structure": "Agilex_Cobot_Magic_storage_orange_basket_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:55" + "train": "0:98" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -54750,8 +51639,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -54763,7 +51652,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -54773,7 +51662,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -54786,7 +51675,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -54796,7 +51685,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -54809,7 +51698,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -54818,20 +51707,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + 
"left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -54840,14 +51741,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -54887,17 +51800,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -54914,10 +51827,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -54934,133 +51847,133 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 
2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" + ] }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" - } - }, - "authors": { + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + 
] + } + }, + "authors": { "contributed_by": [ { "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" @@ -55079,9 +51992,120 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_open_drawer_bottom": { + "leju_robot_moving_parts_i": { + "path": "leju_robot_moving_parts_i", + "dataset_name": "moving_parts_i", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Move to the initial position of the workbench.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-229727", + "dataset_size": "14.0GB", + "statistics": { + "total_episodes": 155, + "total_frames": 229727, + "total_tasks": 1, + "total_videos": 465, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c8b01b9f-9806-4a5a-a895-01df2cac436f", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Move to the initial position of the workbench.", + "Grasp the gray part with right gripper", + "Insert the small component into the corresponding slot on the workbench.", + "Move to the table behind body", + "Grasp the white part with right gripper", + "Place the gray 
part on the table with right gripper", + "Place the white part on the table with right gripper", + "End", + "Pick up the small component from the shelf.", + "Bring the small component to the front of the workbench.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, 
Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ 
└── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_pour_water_black_tablecloth": { "task_categories": [ "robotics" ], @@ -55111,11 +52135,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_open_drawer_bottom", + "dataset_name": "Galaxea_R1_Lite_pour_water_black_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -55123,17 +52147,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "black_table_cloth", + "level1": "table_cloths", + "level2": "black_table_cloth", "level3": null, "level4": null, "level5": null }, { - "object_name": "three_layer_transparent_drawer", - "level1": "laboratory_supplies", - "level2": "three_layer_transparent_drawer", + "object_name": "any_cup", + "level1": "cups", + "level2": "any_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water", + "level1": "beverages", + "level2": "water", "level3": null, "level4": null, "level5": null @@ -55141,62 +52173,214 @@ 
], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "open the bottom drawer." + "use the gripper to pour the water into the other two cups on black table." ], "sub_tasks": [ { - "subtask": "end", + "subtask": "Pick up the gray plastic cup with left gripper", "subtask_index": 0 }, { - "subtask": "Pull open the drawer with the left gripper", + "subtask": "Pour water from blue cup to white cup with the left gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Pour water from white cup to blue cup with the right gripper", "subtask_index": 2 }, { - "subtask": "Grab the bottom drawer with the right gripper", + "subtask": "Grasp the green cup with the left gripper", "subtask_index": 3 }, { - "subtask": "Pull open the drawer with the right gripper", + "subtask": "Place the blue plastic cup on the table with left gripper", "subtask_index": 4 }, { - "subtask": "Grab the bottom drawer with the left gripper", + "subtask": "Pour water from blue cup to green cup with the right gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Pour water from white cup to green cup with the right gripper", "subtask_index": 6 + }, + { + "subtask": "Left gripper", + "subtask_index": 7 + }, + { + "subtask": "Pick up the green plastic cup with right gripper", + "subtask_index": 8 + }, + { + "subtask": "Pour water from blue cup to green cup with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Pour water into the green plastic cup with right gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour water into the blue plastic cup with left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the white cup with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the white cup with the right gripper", + "subtask_index": 14 + }, + { + "subtask": 
"Pick up the blue plastic cup with right gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the white cup with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the blue cup with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the gray plastic cup on the table with left gripper", + "subtask_index": 18 + }, + { + "subtask": "Pick up the green plastic cup with left gripper", + "subtask_index": 19 + }, + { + "subtask": "Pour water into the blue plastic cup with right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the green cup with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Pick up the blue plastic cup with left gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the white cup with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Pour water from white cup to green cup with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Pour water from blue cup to white cup with the right gripper", + "subtask_index": 25 + }, + { + "subtask": "Place the blue plastic cup on the table with right gripper", + "subtask_index": 26 + }, + { + "subtask": "Pour water from white cup to blue cup with the left gripper", + "subtask_index": 27 + }, + { + "subtask": "Pour water into the green plastic cup with left gripper", + "subtask_index": 28 + }, + { + "subtask": "Pour water from green cup to blue cup with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Place the green plastic cup on the table with left gripper", + "subtask_index": 30 + }, + { + "subtask": "Place the green plastic cup on the table with right gripper", + "subtask_index": 31 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the green cup with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Pour water from green cup to blue cup with the left gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the 
green cup with the left gripper", + "subtask_index": 35 + }, + { + "subtask": "End", + "subtask_index": 36 + }, + { + "subtask": "Place the blue cup with the left gripper", + "subtask_index": 37 + }, + { + "subtask": "Pour water from green cup to white cup with the left gripper", + "subtask_index": 38 + }, + { + "subtask": "Right gripper", + "subtask_index": 39 + }, + { + "subtask": "Pour water into the gray plastic cup with right gripper", + "subtask_index": 40 + }, + { + "subtask": "Pour water into the gray plastic cup with left gripper", + "subtask_index": 41 + }, + { + "subtask": "Pour water from green cup to white cup with the right gripper", + "subtask_index": 42 + }, + { + "subtask": "null", + "subtask_index": 43 } ], "atomic_actions": [ "grasp", - "pull" + "pick", + "place", + "pour" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - 
"end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -55207,30 +52391,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 18620, + "total_episodes": 98, + "total_frames": 42644, "fps": 30, - "total_tasks": 7, - "total_videos": 150, + "total_tasks": 44, + "total_videos": 392, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "182.16 MB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "1.43 GB" }, - "frame_num": 18620, - "dataset_size": "182.16 MB", - "data_structure": "Agilex_Cobot_Magic_open_drawer_bottom_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 42644, + "dataset_size": "1.43 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:97" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -55239,8 +52423,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -55252,7 +52459,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -55262,7 +52469,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -55275,7 +52482,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -55285,7 +52492,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -55298,7 +52505,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -55307,32 +52514,20 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - 
"left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -55341,26 +52536,14 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -55400,17 +52583,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -55427,10 +52610,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -55447,130 +52630,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 
2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ 
-55592,9 +52775,255 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_play_toy_piano": { + "R1_Lite_tableware_arrangement": { + "path": "R1_Lite_tableware_arrangement", + "dataset_name": "tableware_arrangement", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Put the chopsticks back from the plate", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chopsticks", + "level1": "tableware", + "level2": "chopsticks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lunch_box", + "level1": "container", + "level2": "lunch_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-16015", + "dataset_size": "628.5MB", + "statistics": { + "total_episodes": 8, + "total_frames": 16015, + "total_tasks": 1, + "total_videos": 24, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "85abe0d0-6c0a-432a-9dd0-14e2ca009399", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + 
"Put the chopsticks back from the plate", + "Put the bowl back from the plate", + "Put the spoon back from the bowl", + "Pick the chopsticks and place it on the plate", + "abnormal", + "Pick the bowl and place it on the plate", + "Pick the spoon and place it in the bowl", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao 
Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_tableware_arrangement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_tableware_arrangement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_hotel_services_ad": { + "path": "leju_robot_hotel_services_ad", + "dataset_name": "hotel_services_ad", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "card", + "level1": "nfc", + "level2": "card", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-73346", + "dataset_size": "4.7GB", + "statistics": { + "total_episodes": 430, + "total_frames": 73346, + "total_tasks": 1, + "total_videos": 1290, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "1bbfd926-a183-40a4-8649-0b9958b0ffcf", + "language": [ + "en", + "zh" + ], + "task_categories": [ + 
"robotics" + ], + "sub_tasks": [ + "End", + "Take out the room card with right gripper", + "Hand the room card to the guest with right gripper", + "Hand the room card to the target.", + "Pick up the room card from the card holder.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, 
Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_hotel_services_ad_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_ad_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_close_drawer_bottom": { "task_categories": [ "robotics" ], @@ -55624,11 +53053,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_play_toy_piano", + "dataset_name": "Agilex_Cobot_Magic_close_drawer_bottom", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -55637,16 +53066,16 @@ "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "toy_piano", - "level1": "toy", - "level2": "toy_piano", + "object_name": "three_layer_transparent_drawer", + "level1": "laboratory_supplies", + "level2": "three_layer_transparent_drawer", "level3": null, "level4": null, "level5": null @@ -55654,62 +53083,50 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the piano in the middle with your left hand and play the piano with your right hand." + "close the bottom drawer." 
], "sub_tasks": [ { - "subtask": "Play the toy piano with the right gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Place the toy piano on the table with the left gripper", + "subtask": "Use the right gripper to contact the bottom layer of the storage cabinet", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Push the bottom drawer closed", "subtask_index": 2 }, - { - "subtask": "Grasp the toy piano with the left gripper", - "subtask_index": 3 - }, - { - "subtask": "Lift the toy piano with the left gripper", - "subtask_index": 4 - }, { "subtask": "null", - "subtask_index": 5 + "subtask_index": 3 } ], "atomic_actions": [ "grasp", - "pick", - "place", - "press" + "push" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -55721,20 +53138,20 @@ ], 
"statistics": { "total_episodes": 50, - "total_frames": 7683, + "total_frames": 16270, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 4, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "230.15 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "157.62 MB" }, - "frame_num": 7683, - "dataset_size": "230.15 MB", - "data_structure": "Airbot_MMK2_play_toy_piano_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 16270, + "dataset_size": "157.62 MB", + "data_structure": "Agilex_Cobot_Magic_close_drawer_bottom_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { "train": "0:49" }, @@ -55808,33 +53225,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -55843,42 +53237,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + 
"right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -55887,36 +53271,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -55956,17 +53330,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -55983,10 +53357,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -56003,83 +53377,143 @@ 
"right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + 
"names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", "support_info": "For technical support, please open an issue on our GitHub repository.", "license_details": "apache-2.0", @@ -56088,22 +53522,21 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_clean_up_the_tableware": { - "path": "Cobot_Magic_clean_up_the_tableware", - "dataset_name": "clean_up_the_tableware", + "G1edu-u3_pick_metal_bowl_ab": { + "path": "G1edu-u3_pick_metal_bowl_ab", + "dataset_name": "pick_metal_bowl_ab", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "pick" ], - "tasks": "use the left arm to grab a blue teacup", + "tasks": "Grasp the metal bowl and lift it to the center of the view with left gripper", "objects": [ { "object_name": "table", @@ -56114,59 +53547,27 @@ "level5": null }, { - "object_name": "bowl", - "level1": "container", - 
"level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cup", - "level1": "container", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "metal_bowl", + "level1": "bowl", + "level2": "metal_bowl", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-18127", - "dataset_size": "1.0GB", + "operation_platform_height": null, + "frame_range": "0-11322", + "dataset_size": "158.4MB", "statistics": { - "total_episodes": 33, - "total_frames": 18127, + "total_episodes": 36, + "total_frames": 11322, "total_tasks": 1, - "total_videos": 99, + "total_videos": 36, "total_chunks": 1, - "chunks_size": 1000, + "chunks_size": 37, "fps": 30 }, - "dataset_uuid": "ad865471-39ce-4343-92f4-8dcc8ed48830", + "dataset_uuid": "988d81c9-a23a-4a67-9613-aed7fcecf7e3", "language": [ "en", "zh" @@ -56175,33 +53576,9 @@ "robotics" ], "sub_tasks": [ - "use the left arm to grab a blue teacup", - "Grasp the bowl with the left gripper", - "use the right arm to grab a blue rice bowl", - "Place the bowl on the plate with the left gripper", - "use the left arm to put the blue soup spoon into the blue teacup in the middle", - "Grasp the spoon with the left gripper", - "Place the spoon on the cup with the left gripper", - "use the right arm to put the blue soup spoon on the blue large plate in the middle", - "use the left arm to put the blue teacup into the blue rice bowl in the middle", - "Grasp the cup with the left gripper", - "use the right arm to put the blue soup spoon into the blue teacup in the middle", - "use the 
right arm to put the blue teacup into the blue rice bowl in the middle", - "use the left arm to grab a blue rice bowl", - "use the right arm to grab a blue teacup", - "Place the spoon on the cup with the right gripper", - "use the right arm to put the blue rice bowl into the blue large plate in the middle", - "abnormal", - "Grasp the cup with the right gripper", - "use the right arm to grab a blue soup spoon", - "Grasp the bowl with the right gripper", - "Grasp the spoon with the right gripper", - "Place the cup on the bowl with the right gripper", - "Place the cup on the bowl with the left gripper", - "use the left arm to grab a blue soup spoon", + "Grasp the metal bowl and lift it to the center of the view with left gripper", "End", - "Place the bowl on the plate with the right gripper", - "use the left arm to put the blue rice bowl into the blue large plate in the middle", + "Grasp the metal bowl and lift it to the center of the view with right gripper", "null" ], "annotations": { @@ -56239,10 +53616,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi 
Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_clean_up_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_clean_up_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── 
(...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_pick_metal_bowl_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_metal_bowl_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_toy_car": { + "Airbot_MMK2_organize_plate": { "task_categories": [ "robotics" ], @@ -56272,11 +53649,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_toy_car", + "dataset_name": "Airbot_MMK2_organize_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "other", - "level2": "laboratory", + "level1": "household", + "level2": "kitichen", "level3": null, "level4": null, "level5": null @@ -56284,17 +53661,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "toy_car", - "level1": "doll", - "level2": "toy_car", + "object_name": "round_plate", + "level1": "kitchen_supplies", + "level2": "plates", "level3": null, "level4": null, "level5": null }, { - "object_name": "white_small_plate", - "level1": "plates", - "level2": "white_small_plate", + "object_name": "storage_rack", + "level1": "home_storage", + "level2": "storage_racks", "level3": null, "level4": null, "level5": null @@ -56302,36 +53679,40 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the toy car out of the plate by hand and place it on the table." + "use both hands to place the stacked plates on the table onto the shelf." 
], "sub_tasks": [ { - "subtask": "Grasp the toy car on the plate and with the left gripper", + "subtask": "Press the plate with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the toy car on the plate and with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Place the plate into the front mezzanine of the shelf with the right gripper", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Press the plate and push it to the right with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the toy car on the table with the right gripper", + "subtask": "Abnormal", "subtask_index": 4 }, { - "subtask": "Place the toy car on the table with the left gripper", + "subtask": "Place the plate into the middle mezzanine of the shelf with the right gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Grasp the plate with the right gripper", "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ @@ -56371,23 +53752,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 14333, + "total_episodes": 595, + "total_frames": 338107, "fps": 30, - "total_tasks": 7, - "total_videos": 196, + "total_tasks": 8, + "total_videos": 2380, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "457.72 MB" + "dataset_size": "14.21 GB" }, - "frame_num": 14333, - "dataset_size": "457.72 MB", - "data_structure": "Airbot_MMK2_take_toy_car_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- 
episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 338107, + "dataset_size": "14.21 GB", + "data_structure": "Airbot_MMK2_organize_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(583 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:48" + "train": "0:594" }, "features": { "observation.images.cam_head_rgb": { @@ -56741,20 +54122,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_pick_up_the_bread_az": { - "path": "G1edu-u3_pick_up_the_bread_az", - "dataset_name": "pick_up_the_bread_az", + "AgiBot-g1_remove_the_accessory": { + "path": "AgiBot-g1_remove_the_accessory", + "dataset_name": "remove_the_accessory", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "End", + "tasks": "Place the hard drive bracket into the box.", "objects": [ { "object_name": "table", @@ -56765,83 +54146,35 @@ "level5": null }, { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - "level1": "fruits", - "level2": "grape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruits", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - 
"object_name": "pear", - "level1": "fruits", - "level2": "pear", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", + "object_name": "accessories", + "level1": "industrial_parts", + "level2": "accessories", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-14542", - "dataset_size": "169.7MB", + "frame_range": "0-27353", + "dataset_size": "14.8GB", "statistics": { - "total_episodes": 26, - "total_frames": 14542, + "total_episodes": 68, + "total_frames": 27353, "total_tasks": 1, - "total_videos": 26, + "total_videos": 544, "total_chunks": 1, - "chunks_size": 27, + "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "95b46832-04a8-4073-b033-dbe0d38fe742", + "dataset_uuid": "fcba76dc-2672-4dc1-9b8e-b375ede64077", "language": [ "en", "zh" @@ -56850,10 +54183,8 @@ "robotics" ], "sub_tasks": [ - "End", - "Grasp the long bread and lift it to the center of the view with left gripper", - "Grasp the long bread and lift it to the center of the view with right gripper", - "Abnormal", + "Place the hard drive bracket into the box.", + "Pick up the hard drive bracket from the feeding area.", "null" ], "annotations": { @@ -56891,238 +54222,388 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_remove_the_accessory_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_remove_the_accessory_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ 
└── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_storage_object_brown_plate": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "leju_robot_box_storage_parcel_g": { + "path": "leju_robot_box_storage_parcel_g", + "dataset_name": "box_storage_parcel_g", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_brown_plate", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Place the package into the parcel locker.", "objects": [ { - "object_name": "brown_plate", - "level1": "plates", - "level2": "brown_plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruits", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_chemical_products", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_pot", - "level1": "cookware", - "level2": "blue_pot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": 
"coke(slim_can)", - "level1": "beverages", - "level2": "coke(slim_can)", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "compass", - "level1": "rulers", - "level2": "compass", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "block_pillar", - "level1": "building_blocks", - "level2": "block_pillar", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "egg_beater", - "level1": "spoons_and_spatulas", - "level2": "egg_beater", + "object_name": "parcel", + "level1": "container", + "level2": "parcel", "level3": null, "level4": null, "level5": null }, { - "object_name": "erasers", - "level1": "stationery", - "level2": "erasers", + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": null, + "frame_range": "0-182793", + "dataset_size": "9.4GB", + "statistics": { + "total_episodes": 492, + "total_frames": 182793, + "total_tasks": 1, + "total_videos": 1476, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "1b130f08-2c4e-4724-ad03-8ecda43684fd", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the package into the parcel locker.", + "Pick up the package from the inbound machine.", + "Pick up the package from the conveyor belt.", + "Place the package onto the inbound machine.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": 
"auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_box_storage_parcel_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_place_the_small_bowl_of_canned_food": { + "path": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food", + "dataset_name": "place_the_small_bowl_of_canned_food", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", + "object_name": "mini_table", + "level1": "furniture", + "level2": "mini_table", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", + "object_name": "small_bowl_of_canned_food", + "level1": "food", + "level2": "small_bowl_of_canned_food", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-7610", + "dataset_size": "313.5MB", + "statistics": { + "total_episodes": 50, + "total_frames": 7610, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "a9c7e711-c375-4aa9-8ce2-93c6f016b80d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Abnormal", + "Place the small bowl of canned 
food on the table with the right gripper", + "Grasp the small bowl of canned food with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_small_bowl_of_canned_food_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_pullBowl_storage_bread_unordered_a": { + "path": "G1edu-u3_pullBowl_storage_bread_unordered_a", + "dataset_name": "pullBowl_storage_bread_unordered_a", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Abnormal", + "objects": [ { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", + "object_name": "towel", + "level1": "daily_necessities", + "level2": "towel", "level3": null, "level4": null, "level5": 
null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-158699", + "dataset_size": "3.3GB", + "statistics": { + "total_episodes": 188, + "total_frames": 158699, + "total_tasks": 1, + "total_videos": 564, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a49", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", + "Place the long bread in pink bowl with left hand", + "End", + "Grasp the round bread with left hand", + "Grasp the long bread with left hand", + "Place the round bread in pink bowl with left hand", + "Move the pink bowl to the center of table with right hand", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai 
Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n 
├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pullBowl_storage_bread_unordered_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_swap_cake_pumpkin_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_swap_cake_pumpkin_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "object_name": "square_building_blocks", - "level1": "building_blocks", - "level2": "square_building_blocks", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "tape", - "level1": "stationery", - "level2": "tape", + "object_name": "pumpkin", + "level1": "vegetables", + "level2": "pumpkin", "level3": null, "level4": null, "level5": null @@ -57134,388 +54615,36 @@ "level3": null, "level4": null, "level5": null - }, - { - "object_name": "duck", - "level1": "doll", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick the target object and place on the brown plate." + "take the cake out with left hand and put the pumpkin in with right hand." 
], "sub_tasks": [ { - "subtask": "Place the blue pot on the brown plate with the right gripper", + "subtask": "Grasp the cake with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the blue pot with the left gripper", + "subtask": "Grasp the pumpkinx with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the plugboard with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Place the soft facial cleanser on the brown plate with the right gripper", + "subtask": "Place the cake on the table with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the back scratcher on the brown plate with the right gripper", + "subtask": "Place the pumpkin into the plate with the right gripper", "subtask_index": 4 }, - { - "subtask": "Place the blackboard erasure on the brown plate with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the potato chips with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Grasp the banana with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the coke on the brown plate with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Place the chocolate on the brown plate with the left gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the duck toy on the brown plate with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the compasses with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the peach on the brown plate with the left gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Place the round wooden block on the brown plate with the right gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the blue cup with the left gripper", - "subtask_index": 15 - }, - { - "subtask": "Place the green lemon on the brown plate with the left gripper", - "subtask_index": 16 - }, - { - "subtask": 
"Place the shower sphere on the brown plate with the left gripper", - "subtask_index": 17 - }, - { - "subtask": "Place the shower sphere on the brown plate with the right gripper", - "subtask_index": 18 - }, - { - "subtask": "Grasp the back scratcher with the right gripper", - "subtask_index": 19 - }, - { - "subtask": "Grasp the square chewing gum with the left gripper", - "subtask_index": 20 - }, - { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 21 - }, - { - "subtask": "Place the yogurt on the brown plate with the right gripper", - "subtask_index": 22 - }, - { - "subtask": "Place the banana on the brown plate with the left gripper", - "subtask_index": 23 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 24 - }, - { - "subtask": "Place the brown towel on the brown plate with the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Grasp the plugboard with the right gripper", - "subtask_index": 26 - }, - { - "subtask": "Grasp the yogurt with the right gripper", - "subtask_index": 27 - }, - { - "subtask": "Place the blue cup on the brown plate with the right gripper", - "subtask_index": 28 - }, - { - "subtask": "Place the plugboard on the brown plate with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 30 - }, - { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 31 - }, - { - "subtask": "Place the duck toy on the brown plate with the left gripper", - "subtask_index": 32 - }, - { - "subtask": "Place the round bread on the brown plate with the right gripper", - "subtask_index": 33 - }, - { - "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 34 - }, - { - "subtask": "Place the bread slice on the brown plate with the left gripper", - "subtask_index": 35 - }, - { - "subtask": "Grasp the hard facial cleanser with the right gripper", - 
"subtask_index": 36 - }, - { - "subtask": "Place the chocolate cake on the brown plate with the right gripper", - "subtask_index": 37 - }, - { - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 38 - }, - { - "subtask": "Place the tin on the brown plate with the right gripper", - "subtask_index": 39 - }, - { - "subtask": "Place the tape on the brown plate with the right gripper", - "subtask_index": 40 - }, - { - "subtask": "Place the blackboard erasure on the brown plate with the right gripper", - "subtask_index": 41 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 42 - }, - { - "subtask": "Place the potato chips on the brown plate with the right gripper", - "subtask_index": 43 - }, - { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 44 - }, - { - "subtask": "Place the tape on the brown plate with the left gripper", - "subtask_index": 45 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 46 - }, - { - "subtask": "End", - "subtask_index": 47 - }, - { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 48 - }, - { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 49 - }, - { - "subtask": "Place the brown towel on the brown plate with the right gripper", - "subtask_index": 50 - }, - { - "subtask": "Place the blue cup on the brown plate with the left gripper", - "subtask_index": 51 - }, - { - "subtask": "Place the compasses on the brown plate with the right gripper", - "subtask_index": 52 - }, - { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 53 - }, - { - "subtask": "Place the compasses on the brown plate with the left gripper", - "subtask_index": 54 - }, - { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 55 - }, - { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 56 - }, - { - 
"subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 57 - }, - { - "subtask": "Place the potato chips on the brown plate with the left gripper", - "subtask_index": 58 - }, - { - "subtask": "Place the plugboard on the brown plate with the left gripper", - "subtask_index": 59 - }, - { - "subtask": "Place the square chewing gum on the brown plate with the right gripper", - "subtask_index": 60 - }, - { - "subtask": "Place the banana on the brown plate with the right gripper", - "subtask_index": 61 - }, - { - "subtask": "Grasp the tin with the right gripper", - "subtask_index": 62 - }, - { - "subtask": "Place the hard facial cleanser on the brown plate with the left gripper", - "subtask_index": 63 - }, - { - "subtask": "Place the square wooden block on the brown plate with the left gripper", - "subtask_index": 64 - }, - { - "subtask": "Place the square chewing gum on the brown plate with the left gripper", - "subtask_index": 65 - }, - { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 66 - }, - { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 67 - }, - { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 68 - }, - { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 69 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 70 - }, - { - "subtask": "Place the square wooden block on the brown plate with the right gripper", - "subtask_index": 71 - }, - { - "subtask": "Place the hard facial cleanser on the brown plate with the right gripper", - "subtask_index": 72 - }, - { - "subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 73 - }, - { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 74 - }, - { - "subtask": "Place the bread slice on the brown plate with the right gripper", - "subtask_index": 75 - }, - { - "subtask": "Grasp the round bread 
with the left gripper", - "subtask_index": 76 - }, - { - "subtask": "Grasp the chocolate with the left gripper", - "subtask_index": 77 - }, - { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 78 - }, - { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 79 - }, - { - "subtask": "Grasp the soft facial cleanser with the right gripper", - "subtask_index": 80 - }, - { - "subtask": "Place the chocolate cake on the brown plate with the left gripper", - "subtask_index": 81 - }, - { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 82 - }, - { - "subtask": "Grasp the green lemon with the left gripper", - "subtask_index": 83 - }, - { - "subtask": "Place the round wooden block on the brown plate with the left gripper", - "subtask_index": 84 - }, - { - "subtask": "Place the round bread on the brown plate with the left gripper", - "subtask_index": 85 - }, - { - "subtask": "Place the blue pot on the brown plate with the left gripper", - "subtask_index": 86 - }, - { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 87 - }, - { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 88 - }, { "subtask": "null", - "subtask_index": 89 + "subtask_index": 5 } ], "atomic_actions": [ @@ -57524,21 +54653,21 @@ "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, 
resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -57555,30 +54684,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 102, - "total_frames": 16390, + "total_episodes": 50, + "total_frames": 13761, "fps": 30, - "total_tasks": 90, - "total_videos": 408, + "total_tasks": 6, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "615.40 MB" + "dataset_size": "399.28 MB" }, - "frame_num": 16390, - "dataset_size": "615.40 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- 
episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 13761, + "dataset_size": "399.28 MB", + "data_structure": "Airbot_MMK2_swap_cake_pumpkin_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:101" + "train": "0:49" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -57587,8 +54716,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -57597,11 +54726,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -57610,8 +54739,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -57620,11 +54749,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -57633,8 +54762,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -57643,11 +54772,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -57656,8 +54785,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", 
"video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -57669,7 +54798,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -57684,14 +54813,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -57706,8 +54857,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -57858,66 +55031,6 @@ 2 ], "dtype": "int32" - }, - 
"gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -57939,20 +55052,19 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_move_beverage": { - "path": "Cobot_Magic_move_beverage", - "dataset_name": "move_beverage", + "G1edu-u3_pick_crumpled_paper_aa": { + "path": "G1edu-u3_pick_crumpled_paper_aa", + "dataset_name": "pick_crumpled_paper_aa", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "pick" ], "tasks": "End", "objects": [ @@ -57965,141 +55077,27 @@ "level5": null }, { - "object_name": "green_bottle_beverage", - "level1": "drink", - "level2": "green_bottle_beverage", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_bottle_beverage", - "level1": "drink", - "level2": 
"red_bottle_beverage", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "black_bottle_beverage", - "level1": "drink", - "level2": "black_bottle_beverage", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-31813", - "dataset_size": "636.5MB", - "statistics": { - "total_episodes": 100, - "total_frames": 31813, - "total_tasks": 1, - "total_videos": 300, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "b6d0e7b1-1a5c-4188-bdca-cab2a7c613ed", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Grasp the beverage bottle placed at the back of the desk", - "Place the drink at the front of the table", - "Pick up the drink", - "Place the beverage bottle to the front of the table", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan 
Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_move_beverage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_move_beverage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_put_the_shoes_into_the_shoe_box": { - "path": "R1_Lite_put_the_shoes_into_the_shoe_box", - "dataset_name": "put_the_shoes_into_the_shoe_box", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Put the shoes out of the shoe box", - "objects": [ - { - "object_name": "shoes", - "level1": "clothing", - "level2": "shoes", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "shoe_box", - "level1": "container", - "level2": "shoe_box", + "object_name": "paper", + "level1": "rubbish", + "level2": "paper", 
"level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-87219", - "dataset_size": "3.8GB", + "frame_range": "0-4485", + "dataset_size": "58.9MB", "statistics": { - "total_episodes": 106, - "total_frames": 87219, + "total_episodes": 13, + "total_frames": 4485, "total_tasks": 1, - "total_videos": 318, + "total_videos": 13, "total_chunks": 1, - "chunks_size": 1000, + "chunks_size": 13, "fps": 30 }, - "dataset_uuid": "87e16c2a-8d5b-4e0b-9706-cfae7e2b29e9", + "dataset_uuid": "b6576ff7-68ba-498d-a50c-c97bad67554e", "language": [ "en", "zh" @@ -58108,11 +55106,10 @@ "robotics" ], "sub_tasks": [ - "Put the shoes out of the shoe box", - "abnormal", - "Put the shoes into the shoe box", - "Open the shoe box", - "Close the shoe box", + "End", + "Grasp the paper ball and lift it to the center of the view with right gripper  ", + "Grasp the paper ball and lift it to the center of the view with left gripper ", + "Abnormal", "null" ], "annotations": { @@ -58150,10 +55147,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi 
Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ 
└── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_pick_crumpled_paper_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_crumpled_paper_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_move_mouse": { + "Agilex_Cobot_Magic_erase_board_left_side": { "task_categories": [ "robotics" ], @@ -58183,11 +55180,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_mouse", + "dataset_name": "Agilex_Cobot_Magic_erase_board_left_side", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office & workspace", - "level2": "office", + "level1": "education", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -58195,25 +55192,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "mouse", - "level1": "appliances", - "level2": "mouse", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "mouse_pad", - "level1": "appliances", - "level2": "mouse_pad", + "object_name": "whiteboard", + "level1": "stationery", + "level2": "whiteboard", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "red_whiteboard_Eraser", + "level1": "stationery", + "level2": "red_whiteboard_Eraser", "level3": null, "level4": null, "level5": null @@ -58221,42 +55218,47 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the right gripper organize the mouse on the mouse pad." + "wipe off the handwriting on the whiteboard." 
], "sub_tasks": [ { - "subtask": "Grasp the mouse with the left gripper", + "subtask": "Place the eraser with the left gripper", "subtask_index": 0 }, { - "subtask": "Abnormal", + "subtask": "Grasp the eraser with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the mouse with the right gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Place the mouse on the mouse mat with the right gripper", + "subtask": "Grasp the eraser and wipe the blackboard with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the eraser with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the mouse on the mouse mat with the left gripper", + "subtask": "Grasp the eraser with the right gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ "grasp", "lift", - "lower" + "lower", + "wipe" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -58288,23 +55290,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 112, - "total_frames": 49737, + "total_episodes": 101, + "total_frames": 51478, "fps": 30, - "total_tasks": 7, - "total_videos": 336, + "total_tasks": 8, + "total_videos": 303, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "386.89 MB" + "dataset_size": "490.77 MB" }, - "frame_num": 49737, - "dataset_size": "386.89 MB", - "data_structure": "Agilex_Cobot_Magic_move_mouse_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- 
info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (100 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 51478, + "dataset_size": "490.77 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_left_side_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:111" + "train": "0:100" }, "features": { "observation.images.cam_head_rgb": { @@ -58481,17 +55483,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -58508,10 +55510,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -58528,110 +55530,110 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, 
"gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "gripper_open_scale_state": { "names": [ @@ -58675,221 +55677,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "G1edu-u3_put_the_tissue_box_al": { - "path": "G1edu-u3_put_the_tissue_box_al", - "dataset_name": "put_the_tissue_box_al", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tissue", - "level1": "daily_necessities", - "level2": "tissue", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-10816", - "dataset_size": "142.3MB", - "statistics": { - "total_episodes": 39, - "total_frames": 10816, - "total_tasks": 1, - "total_videos": 39, - "total_chunks": 1, - "chunks_size": 41, - "fps": 30 - }, - "dataset_uuid": "39aeb43d-8473-408d-a61d-bdd24134ab9a", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Place the tissue box on the table with both gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": 
"auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_put_the_tissue_box_al_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_put_the_tissue_box_al_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_box_storage_parcel_i": { - "path": "leju_robot_box_storage_parcel_i", - "dataset_name": "box_storage_parcel_i", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pick up the bottle from the table.", - "objects": [ - { - "object_name": 
"table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parcel", - "level1": "container", - "level2": "parcel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-313461", - "dataset_size": "19.3GB", - "statistics": { - "total_episodes": 222, - "total_frames": 313461, - "total_tasks": 1, - "total_videos": 666, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "6e0f6aaf-d447-436f-bfc3-bedd5a1bce04", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the bottle from the table.", - "Flip the bottle to face forward.", - "Take the bottle from the table.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic 
Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_storage_cake_pan": { + "Airbot_MMK2_close_drawer": { "task_categories": [ "robotics" ], @@ -58919,7 +55707,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_cake_pan", + "dataset_name": "Airbot_MMK2_close_drawer", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -58931,17 +55719,9 @@ "env_type": "Due to 
some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "pan", - "level1": "kitchen_supplies", - "level2": "pan", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "layer_transparent_drawer", + "level1": "storage_utensils", + "level2": "layer_transparent_drawer", "level3": null, "level4": null, "level5": null @@ -58949,38 +55729,24 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the pot down with left hand and place the cake in it with right hand." + "close the top drawer by hand." ], "sub_tasks": [ { - "subtask": "Grasp the frying pan with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Grasp the bread with the right gripper", + "subtask": "Close the top drawer with the right hand", "subtask_index": 1 }, - { - "subtask": "End", - "subtask_index": 2 - }, - { - "subtask": "Place the bread on the frying pan with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Place the frying pan on the table with the left gripper", - "subtask_index": 4 - }, { "subtask": "null", - "subtask_index": 5 + "subtask_index": 2 } ], "atomic_actions": [ - "grasp", - "pick", - "place" + "push" ], "robot_name": [ "Airbot_MMK2" @@ -59014,23 +55780,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 8832, + "total_episodes": 50, + "total_frames": 7377, "fps": 30, - "total_tasks": 6, - "total_videos": 192, + "total_tasks": 3, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "250.91 MB" + "dataset_size": "217.36 MB" }, - "frame_num": 8832, - "dataset_size": "250.91 MB", - "data_structure": "Airbot_MMK2_storage_cake_pan_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 7377, + "dataset_size": "217.36 MB", + "data_structure": "Airbot_MMK2_close_drawer_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:47" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -59384,7 +56150,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_mix_blue_yellow_large_test_tube": { + "Agilex_Cobot_Magic_storage_bread_basket": { "task_categories": [ "robotics" ], @@ -59414,11 +56180,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_blue_yellow_large_test_tube", + "dataset_name": "Agilex_Cobot_Magic_storage_bread_basket", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -59426,49 +56192,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_pigment", - "level1": "materials", - "level2": "yellow_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - 
"object_name": "test_tubes", - "level1": "laboratory_supplies", - "level2": "test_tubes", + "object_name": "small_yellow _basket", + "level1": "home_storage", + "level2": "small_yellow _basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", + "object_name": "waffle", + "level1": "food", + "level2": "waffle", "level3": null, "level4": null, "level5": null @@ -59476,78 +56218,63 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the test tube with yellow pigment and the test tube with blue pigment by grippers and pour them into the beaker." + "Put the bakery in the basket." ], "sub_tasks": [ { - "subtask": "Grasp the yellow reagent with the right gripper", + "subtask": "Abnormal", "subtask_index": 0 }, { - "subtask": "Pour the yellow reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask": "Grasp the bread with right gripper", "subtask_index": 1 }, { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Pour the blue reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask": "Grasp the bread with left gripper", "subtask_index": 3 }, { - "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask": "Place the bread in the basket with right gripper", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Place the bread in the basket with left gripper", "subtask_index": 5 }, - { - "subtask": "Place the test tube into the paper cup with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Place the test tube into the paper cup with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "Grasp the blue reagent with the left gripper", - "subtask_index": 8 - }, 
{ "subtask": "null", - "subtask_index": 9 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", - "pick", - "place", - "pour" + "lift", + "lower" ], "robot_name": [ - "Galaxea_R1_Lite" + "Agilex_Cobot_Magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -59558,53 +56285,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 51, - "total_frames": 29341, + "total_episodes": 98, + "total_frames": 32042, "fps": 30, - "total_tasks": 10, - "total_videos": 204, + "total_tasks": 7, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "1.59 GB" + "state_dim": 26, + 
"action_dim": 26, + "camera_views": 3, + "dataset_size": "354.51 MB" }, - "frame_num": 29341, - "dataset_size": "1.59 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_large_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (39 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 32042, + "dataset_size": "354.51 MB", + "data_structure": "Agilex_Cobot_Magic_storage_bread_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:50" + "train": "0:97" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -59613,8 +56317,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -59626,7 +56330,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -59636,7 +56340,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -59649,7 +56353,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -59659,7 +56363,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -59672,7 +56376,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -59681,20 +56385,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + 
"left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -59703,14 +56419,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -59862,26 +56590,6 @@ ], "dtype": "int32" }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, "gripper_mode_state": { "names": [ "left_gripper_mode", @@ -59921,6 +56629,26 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + 
"left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -59942,9 +56670,106 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_shark_doll": { + "alpha_bot_2_item_reversal": { + "path": "alpha_bot_2_item_reversal", + "dataset_name": "item_reversal", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the cup with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "kitchen_supplies", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-82235", + "dataset_size": "2.1GB", + "statistics": { + "total_episodes": 90, + "total_frames": 82235, + "total_tasks": 1, + "total_videos": 360, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "aac569d8-dbc7-4128-a573-659adbe0ed51", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the cup with left gripper", + "Place the cup on the table with right gripper", + "Pass the cup to the right gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + 
"authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_item_reversal_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_item_reversal_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_take_egg": { "task_categories": [ "robotics" ], @@ -59974,11 +56799,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_shark_doll", + "dataset_name": "Airbot_MMK2_take_egg", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "catering", + "level2": "restaurant", "level3": null, "level4": null, "level5": null @@ -59986,25 +56811,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "shark_doll", - "level1": "toys", - "level2": "shark_doll", + "object_name": "egg_carton", + "level1": "disposable_items", + "level2": "egg_carton", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "laboratory_supplies", - "level2": "lid", + "object_name": "egg", + "level1": "eggs", + "level2": "egg", "level3": null, "level4": null, "level5": null @@ -60012,34 +56829,30 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the shark doll and put it in the lid." + "take out the brown eggs from the egg box." 
], "sub_tasks": [ { - "subtask": "Place the whale on the white lid with the right gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Grasp the whale with the left gripper", + "subtask": "Place the egg on the table with the right gripper", "subtask_index": 1 }, { - "subtask": "Deliver the whale from left gripper to right gripper", + "subtask": "Grasp the egg from the egg storage box with the right gripper", "subtask_index": 2 }, - { - "subtask": "End", - "subtask_index": 3 - }, { "subtask": "null", - "subtask_index": 4 + "subtask_index": 3 } ], "atomic_actions": [ "grasp", - "place", - "pick" + "pick", + "place" ], "robot_name": [ "Airbot_MMK2" @@ -60073,23 +56886,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 93, - "total_frames": 13034, + "total_episodes": 101, + "total_frames": 18643, "fps": 30, - "total_tasks": 5, - "total_videos": 372, + "total_tasks": 4, + "total_videos": 404, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "461.32 MB" + "dataset_size": "758.58 MB" }, - "frame_num": 13034, - "dataset_size": "461.32 MB", - "data_structure": "Airbot_MMK2_storage_shark_doll_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 18643, + "dataset_size": "758.58 MB", + "data_structure": "Airbot_MMK2_take_egg_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:92" + "train": "0:100" }, "features": { "observation.images.cam_head_rgb": { @@ -60443,9 +57256,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_storage_plate": { - "path": "Cobot_Magic_storage_plate", - "dataset_name": "storage_plate", + "Cobot_Magic_steamer_storage_dumpling": { + "path": "Cobot_Magic_steamer_storage_dumpling", + "dataset_name": "steamer_storage_dumpling", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -60456,7 +57269,7 @@ "pick", "place" ], - "tasks": "Left grab the white plate.", + "tasks": "use the left arm to put the dumpling into the steamer", "objects": [ { "object_name": "table", @@ -60467,9 +57280,9 @@ "level5": null }, { - "object_name": "rack", - "level1": "furniture", - "level2": "rack", + "object_name": "dumplings", + "level1": "food", + "level2": "dumplings", "level3": null, "level4": null, "level5": null @@ -60483,134 +57296,27 @@ "level5": null }, { - "object_name": "plate", + "object_name": "steamer", "level1": "container", - "level2": "plate", + "level2": "steamer", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-57019", - "dataset_size": "903.4MB", - "statistics": { - "total_episodes": 84, - "total_frames": 57019, - "total_tasks": 1, - "total_videos": 252, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "9f0b0953-fa62-4d10-8b2a-17b1ff44e9cf", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Left 
grab the white plate.", - "Left grab the gray plate.", - "Place the plate on the rack", - "Right place it at the front of the shelf.", - "Right receive the plate.", - "Pick up the plate from the table", - "Left lift the plate.", - "end", - "Left grab the blue plate.", - "Left grab the yellow plate.", - "Hand over the plate", - "Right place it at the back of the shelf.", - "Left grab the pink plate.", - "Left grab the green plate.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, 
Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_storage_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_storage_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Tianqin_A2_place_the_paper_box": { - "path": "Tianqin_A2_place_the_paper_box", - "dataset_name": "place_the_paper_box", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-94841", - "dataset_size": "3.0GB", + "frame_range": "0-417589", + "dataset_size": "19.8GB", "statistics": { - "total_episodes": 236, - "total_frames": 94841, - "total_tasks": 1, - "total_videos": 708, + "total_episodes": 580, + "total_frames": 417589, + "total_tasks": 6, + "total_videos": 1740, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "291007f6-cfd0-4e23-bc97-da01c0f367ac", + 
"dataset_uuid": "df5bb1ee-1fd7-4b3d-8502-4cda9ee6bbc5", "language": [ "en", "zh" @@ -60619,12 +57325,16 @@ "robotics" ], "sub_tasks": [ - "End", + "use the left arm to put the dumpling into the steamer", "Abnormal", - "Grasp the paper box", - "Place the paper box on the table", - "Place the data cable in the another box", - "Grasp the data cable", + "use the right arm to put the dumpling into the steamer", + "Grasp the dumpling with left gripper", + "End", + "Place the dumpling on the steamer with right gripper", + "use the left arm to grab a dumpling", + "Grasp the dumpling with right gripper", + "use the right arm to grab a dumpling", + "Place the dumpling on the steamer with left gripper", "null" ], "annotations": { @@ -60662,12 +57372,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Tianqin_A2_place_the_paper_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Tianqin_A2_place_the_paper_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n 
│ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_steamer_storage_dumpling_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_steamer_storage_dumpling_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── 
episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Split_aloha_wipe_the_table": { - "path": "Split_aloha_wipe_the_table", - "dataset_name": "wipe_the_table", + "Cobot_Magic_plate_storaje_baozi": { + "path": "Cobot_Magic_plate_storaje_baozi", + "dataset_name": "plate_storaje_baozi", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -60676,10 +57386,9 @@ "atomic_actions": [ "grasp", "place", - "pick", - "wipe" + "pick" ], - "tasks": "Stand the paper cup upright", + "tasks": "Grasp the baozi on the steamer with left gripper", "objects": [ { "object_name": "table", @@ -60690,295 +57399,43 @@ "level5": null }, { - "object_name": "cup", - "level1": "container", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rag", + "object_name": "tablecloth", "level1": "clothing", - "level2": "rag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "stain", - "level1": "garbage", - "level2": "stain", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-166266", - "dataset_size": "1.3GB", - "statistics": { - "total_episodes": 302, - "total_frames": 166266, - "total_tasks": 3, - "total_videos": 906, 
- "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "c5648c51-14f0-4d84-a3cc-2fd1c28069a8", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Stand the paper cup upright", - "Wipe the stains off the table with a rag", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, 
Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Split_aloha_wipe_the_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_wipe_the_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── 
episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_food_packaging": { - "path": "RMC-AIDA-L_food_packaging", - "dataset_name": "food_packaging", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "pull" - ], - "tasks": "Grab the pear with your right hand.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "level2": "tablecloth", "level3": null, "level4": null, "level5": null }, { - "object_name": "bag", - "level1": "container", - "level2": "bag", + "object_name": "baozi", + "level1": "food", + "level2": "baozi", "level3": null, "level4": null, "level5": null }, { - "object_name": "lunch_box", + "object_name": "steamer", "level1": "container", - "level2": "lunch_box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cucumber", - "level1": "vegetable", - "level2": "cucumber", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pear", - "level1": "fruit", - "level2": "pear", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - 
"level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-817652", - "dataset_size": "9.4GB", - "statistics": { - "total_episodes": 497, - "total_frames": 817652, - "total_tasks": 2, - "total_videos": 1491, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "59eb8786-4e0e-4c5e-b129-aec36ec1eda9", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grab the pear with your right hand.", - "Pick up the lunch box with your right hand.", - "end", - "Put the cucumber into the lunch bag with your right hand.", - "Hold the lunch bag with your left hand.", - "Secure the lunch bag with your left hand.", - "Pick up the cucumber with your right hand.", - "Hold the lunch bag with your right hand.", - "Take the banana with your right hand.", - "Place the lunch box into the lunch bag with your right hand.", - "Place the peach into the lunch bag with your right hand.", - "Zip up the lunch bag with your right hand.", - "Put the banana into the lunch bag with your right hand.", - "Place the pear into the lunch bag with your right hand.", - "Grab the peach with your right hand.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": 
"https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_tool_storage": { - "path": "AgiBot-g1_tool_storage", - "dataset_name": "tool_storage", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - 
"scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the mouse and power cord into the box.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tools", - "level1": "tools", - "level2": "tools", + "level2": "steamer", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-30600", - "dataset_size": "13.5GB", + "frame_range": "0-261093", + "dataset_size": "12.3GB", "statistics": { - "total_episodes": 67, - "total_frames": 30600, - "total_tasks": 1, - "total_videos": 536, + "total_episodes": 495, + "total_frames": 261093, + "total_tasks": 6, + "total_videos": 1485, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "4681c572-c9b2-475c-b9c1-66ed9f3d533d", + "dataset_uuid": "749fc774-003a-487c-948b-b6fc7ad307e1", "language": [ "en", "zh" @@ -60987,8 +57444,22 @@ "robotics" ], "sub_tasks": [ - "Place the mouse and power cord into the box.", - "Grab and lift both the mouse and power cord from the accessory packaging area at the same time.", + "Grasp the baozi on the steamer with left gripper", + "Abnormal", + "Place the yellow baozi on the plate with left gripper", + "Place the yellow baozi on the plate with right gripper", + "End", + "Grasp the dumpling with left gripper", + "use the left arm to grab the steamed stuffed bun that is closest to it", + "use the right arm to grab the steamed stuffed bun that is closest to it", + "use the right arm to put the steamed stuffed bun into the plate", + "Place the baozi on the plate with left gripper", + "Grasp the yellow baozi on the steamer with left gripper", + "use the left arm to put the steamed stuffed bun into the plate", + "Place the dumpling on the steamer with 
right gripper", + "Grasp the dumpling with right gripper", + "Grasp the yellow baozi on the steamer with right gripper", + "Place the dumpling on the steamer with left gripper", "null" ], "annotations": { @@ -61026,10 +57497,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_tool_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_tool_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": 
"Cobot_Magic_plate_storaje_baozi_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_plate_storaje_baozi_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_fold_jeans_shorts_children's": { + "Airbot_MMK2_storage_apple_orange": { "task_categories": [ "robotics" ], @@ -61059,11 +57530,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_fold_jeans_shorts_children's", + "dataset_name": "Airbot_MMK2_storage_apple_orange", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "bedroom", + "level1": "scene_level1", + "level2": "scene_level2", "level3": null, "level4": null, "level5": null @@ -61071,25 +57542,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "apple", + "level1": "fruits", + "level2": "apple", "level3": null, "level4": null, "level5": null }, { - "object_name": "denim_shorts", - "level1": "clothing", - "level2": "denim_shorts", + "object_name": "orange", + "level1": "fruits", + "level2": "orange", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_tray", - "level1": "kitchen_supplies", - "level2": "green_tray", + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -61097,72 +57568,61 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "Fold the children's denim shorts in half with two grippers, then fold them in half again, and use the left claw to place the folded children's denim shorts on the tray." 
+ "pick up the apple with left hand and put it in the storage box, and pick up the orange with right hand and put it in the storage box." ], "sub_tasks": [ { - "subtask": "Grasp the blue trousers with the right gripper", + "subtask": "Grasp the orange with the right gripper", "subtask_index": 0 }, { - "subtask": "Fold the blue trousers upwards with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Fold the blue trousers from right to left with the right gripper", + "subtask": "Place the orange into the right compartment of the storage box with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the folded blue trousers on the green tray with the left gripper", + "subtask": "Grasp the apple with the left gripper", "subtask_index": 3 }, { - "subtask": "Grasp the blue trousers with the left gripper", + "subtask": "Place the apple into the left compartment of the storage box with the left gripper", "subtask_index": 4 }, - { - "subtask": "Fold the blue trousers upwards with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Fold the blue trousers from left to right with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "end", - "subtask_index": 7 - }, { "subtask": "null", - "subtask_index": 8 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "fold" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - 
"cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -61173,23 +57633,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 60562, + "total_episodes": 41, + "total_frames": 6657, "fps": 30, - "total_tasks": 9, - "total_videos": 150, + "total_tasks": 6, + "total_videos": 164, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "912.51 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "176.34 MB" }, - "frame_num": 60562, - "dataset_size": "912.51 MB", - "data_structure": "Agilex_Cobot_Magic_fold_jeans_shorts_children_s_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- 
episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 6657, + "dataset_size": "176.34 MB", + "data_structure": "Airbot_MMK2_storage_apple_orange_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:40" }, "features": { "observation.images.cam_head_rgb": { @@ -61261,10 +57721,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -61273,32 +57756,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -61307,26 +57800,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -61477,66 +57980,6 @@ 2 ], "dtype": "int32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - 
"left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] } }, "authors": { @@ -61558,48 +58001,22 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Realman_RMC-AIDA-L_storage_towel_basket": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "R1_Lite_move_the_position_of_the_milk": { + "path": "R1_Lite_move_the_position_of_the_milk", + "dataset_name": "move_the_position_of_the_milk", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Realman_RMC-AIDA-L_storage_towel_basket", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "kitchen", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Grasp the wangzai milk with left gripper", "objects": [ { "object_name": "table", @@ -61610,666 +58027,355 @@ "level5": null }, { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "towel", + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bath_ball", "level1": "daily_necessities", - "level2": "towel", + "level2": "bath_ball", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "the left gripper grasp the basket on the table, the right grippe pick up the towel on the table and place it into the basket." 
- ], - "sub_tasks": [ + }, { - "subtask": "Place the towel into the basket with the right gripper", - "subtask_index": 0 + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Abnormal", - "subtask_index": 1 + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "PIck up the basket with the left gripper", - "subtask_index": 2 + "object_name": "can", + "level1": "container", + "level2": "can", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "PIck up the towel with the right gripper", - "subtask_index": 3 + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "end", - "subtask_index": 4 + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "null", - "subtask_index": 5 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Realman_RMC-AIDA-L" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", 
- "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 118, - "total_frames": 58520, - "fps": 30, - "total_tasks": 6, - "total_videos": 354, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 28, - "action_dim": 28, - "camera_views": 3, - "dataset_size": "684.59 MB" - }, - "frame_num": 58520, - "dataset_size": "684.59 MB", - "data_structure": "Realman_RMC-AIDA-L_storage_towel_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(106 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:117" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 28 - ], - "names": [ - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", 
- "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_arm_joint_7_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 28 - ], - "names": [ - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_arm_joint_7_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" - ] + { + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null + { + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", + "level3": null, + "level4": null, + "level5": null }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + 
"level5": null }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" + { + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" + { + "object_name": "towel", + "level1": "clothing", + "level2": "towel", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + 
"level4": null, + "level5": null }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_action": { - "names": [ - 
"left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, 
Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "Cobot_Magic_cap_the_pen_a": { - "path": "Cobot_Magic_cap_the_pen_a", - "dataset_name": "cap_the_pen_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "insert" - ], - "tasks": "Secure the pen cap with the right hand.", - "objects": [ { - "object_name": "table", + "object_name": "glass", "level1": "furniture", - "level2": "table", + "level2": "glass", 
"level3": null, "level4": null, "level5": null }, { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", "level3": null, "level4": null, "level5": null }, { - "object_name": "pen_cap", - "level1": "office_supplies", - "level2": "pen_cap", + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-51317", - "dataset_size": "1.0GB", - "statistics": { - "total_episodes": 55, - "total_frames": 51317, - "total_tasks": 1, - "total_videos": 165, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "b549e79c-e49e-4cec-a476-b94bdd766f22", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Secure the pen cap with the right hand.", - "Insert the pen into the cap", - "End", - "Put the cap on the pen", - "Pick up the pen cap", - "Grab the pen barrel with the left hand.", - "Secure the pen cap with the left hand.", - "Grab the pen cap with the left hand.", - "Place the pen on the table", - "Abnormal", - "Grab the pen cap with the right hand.", - "Insert the pen cap with the right hand.", - "Grab the pen barrel with the right hand.", - "Pick up the pen", - "Place the pen down with the right hand.", - "Insert the pen cap with the left hand.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { 
- "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_cap_the_pen_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_cap_the_pen_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
└── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Split_aloha_wipe_table": { - "path": "Split_aloha_wipe_table", - "dataset_name": "wipe_table", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick", - "wipe" - ], - "tasks": "Grasp the rag with right gripper", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup", - "level1": "container", - "level2": "cup", + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "rag", - "level1": "clothing", - "level2": "rag", + "object_name": "chips", + "level1": "food", + "level2": "chips", "level3": null, "level4": null, "level5": null }, { - "object_name": "stain", - "level1": "garbage", - "level2": "stain", + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-106804", - "dataset_size": "1.3GB", + "frame_range": "0-12898", + "dataset_size": "421.8MB", "statistics": { - "total_episodes": 200, - "total_frames": 106804, + "total_episodes": 59, + "total_frames": 12898, "total_tasks": 1, - "total_videos": 600, + "total_videos": 236, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": 
"18848f29-f6ff-4f8a-ab4d-acc67d812c64", + "dataset_uuid": "945013f7-9a63-4c59-af10-4ad4df23d67f", "language": [ "en", "zh" @@ -62278,14 +58384,13 @@ "robotics" ], "sub_tasks": [ - "Grasp the rag with right gripper", - "Abnormal", - "Place the rag on the table with right gripper", - "Stand the paper cup upright with left gripper", - "End", + "Grasp the wangzai milk with left gripper", + "Place the wangzai milk on the table with right gripper", "Static", - "Grasp the paper cup with left gripper", - "Wipe the stains off the table with the rag with right gripper", + "Place the wangzai milk on the table with left gripper", + "Grasp the wangzai milk with right gripper", + "End", + "Abnormal", "null" ], "annotations": { @@ -62323,12 +58428,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Split_aloha_wipe_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_wipe_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_move_the_position_of_the_milk_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_milk_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_pour_water": { - "path": "R1_Lite_pour_water", - "dataset_name": "pour_water", + "R1_Lite_place_the_dress_shirt_on_the_hanger": { + "path": "R1_Lite_place_the_dress_shirt_on_the_hanger", + "dataset_name": "place_the_dress_shirt_on_the_hanger", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -62339,54 +58444,54 @@ "pick", "place" ], - "tasks": "Pour water into the cup", + "tasks": "abnormal", "objects": [ { - "object_name": "table", + "object_name": "hanger", "level1": "furniture", - "level2": "table", + "level2": "hanger", "level3": null, "level4": null, "level5": null }, { - "object_name": "water", - "level1": "drink", - "level2": "water", + "object_name": "shirt", + "level1": "clothing", + "level2": "shirt", "level3": null, "level4": null, "level5": null }, { - "object_name": "kettle", - 
"level1": "electric_appliance", - "level2": "kettle", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup", - "level1": "container", - "level2": "cup", + "object_name": "bed", + "level1": "furniture", + "level2": "bed", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-53125", - "dataset_size": "2.0GB", + "frame_range": "0-120516", + "dataset_size": "7.1GB", "statistics": { - "total_episodes": 51, - "total_frames": 53125, + "total_episodes": 103, + "total_frames": 120516, "total_tasks": 1, - "total_videos": 153, + "total_videos": 309, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "8b05bffb-745b-4de7-bbfa-113e42018671", + "dataset_uuid": "99bbe00a-c986-419f-b64a-51eef829a3b8", "language": [ "en", "zh" @@ -62395,12 +58500,12 @@ "robotics" ], "sub_tasks": [ - "Pour water into the cup", - "Put the cup down on the table", - "Put down the kettle on the table", - "Pour water into another cup", - "Pick up the cup", - "Pick up the kettle", + "abnormal", + "Hook the other side of the shirt onto the hanger", + "Hook one side of the shirt onto the hanger", + "Lift the shirt and hanger", + "Take the clothes hanger out of the clothes", + "Place the hanger on the bed", "null" ], "annotations": { @@ -62438,12 +58543,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, 
Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_pour_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_pour_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_place_the_dress_shirt_on_the_hanger_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_place_the_dress_shirt_on_the_hanger_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galbot_g1_steamer_storage_baozi_j": { - "path": "Galbot_g1_steamer_storage_baozi_j", - "dataset_name": "steamer_storage_baozi_j", + "R1_Lite_take_and_put_away_garden_stuff_a": { + "path": "R1_Lite_take_and_put_away_garden_stuff_a", + "dataset_name": "take_and_put_away_garden_stuff_a", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -62452,10 +58557,28 @@ "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pull", + "push" ], - "tasks": "Place the pot lid on the table with left gripper", + "tasks": "Grasp the apple and place it on the 
table with right gripper", "objects": [ + { + "object_name": "refrigerator", + "level1": "furniture", + "level2": "refrigerator", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "drawer", + "level1": "container", + "level2": "drawer", + "level3": null, + "level4": null, + "level5": null + }, { "object_name": "table", "level1": "furniture", @@ -62465,51 +58588,67 @@ "level5": null }, { - "object_name": "baozi", + "object_name": "tray", + "level1": "container", + "level2": "tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato", "level1": "food", - "level2": "baozi", + "level2": "potato", "level3": null, "level4": null, "level5": null }, { - "object_name": "steamer", - "level1": "cookware", - "level2": "steamer", + "object_name": "green_pepper", + "level1": "food", + "level2": "green_pepper", "level3": null, "level4": null, "level5": null }, { - "object_name": "pot_lid", - "level1": "daily_necessities", - "level2": "pot_lid", + "object_name": "pumpkin", + "level1": "food", + "level2": "pumpkin", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "apple", + "level1": "food", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-56015", - "dataset_size": "1.1GB", + "frame_range": "0-355644", + "dataset_size": "15.9GB", "statistics": { - "total_episodes": 41, - "total_frames": 56015, + "total_episodes": 148, + "total_frames": 355644, "total_tasks": 1, - "total_videos": 123, + "total_videos": 444, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "b53dc50b-2b01-4783-900a-e0a8fa876a20", + "dataset_uuid": "7c763184-bc85-4d7e-b01b-85ba85cbb6f2", "language": [ "en", "zh" @@ -62518,16 
+58657,39 @@ "robotics" ], "sub_tasks": [ - "Place the pot lid on the table with left gripper", - "Grasp the pot lid with left gripper", - "Place the pot lid on the steamer with left gripper", + "Grasp the apple and place it on the table with right gripper", + "Place the green bell pepper in the lower drawer with right gripper", + "Put green pepper in the refrigerator drawer", + "Place the orange in the lower drawer with right gripper", + "Place the potato in the lower drawer with left gripper", + "Take out the potato and place it on the table", + "Open the refrigerator door with left gripper", + "Place the potato in the lower drawer with right gripper", + "Take out the apple and place it on the tray", + "Open the refrigerator door", "End", - "Move the switch to the right with left gripper", - "Place the baozi on the steamer with right gripper", - "Grasp the baozi with right gripper", - "Place the baozi on the plate with right gripper", - "Move the switch to the left with right gripper", - "Grasp the baozi in the plate with right gripper", + "Close the refrigerator door", + "Close the lower drawer with the left gripper", + "Grasp the potato and place it on the table with left gripper", + "Open the lower refrigerator drawer", + "Close the middle refrigerator door with the right gripper", + "Place the apple in the lower drawer with right gripper", + "Put orange in the refrigerator drawer", + "Abnormal", + "Put pumpkin in the refrigerator drawer", + "Grasp the potato and place it on the table with right gripper", + "Close the lower refrigerator drawer", + "Place the green bell pepper in the lower drawer with left gripper", + "Put apple in the refrigerator drawer", + "Take out the pumpkin and place it on the table", + "Close the lower drawer with the right gripper", + "Grasp the orange and place it on the table with right gripper", + "Put potato in the refrigerator drawer", + "Take out the green pepper and place it on the table", + "Take out the orange and place it on 
the tray", + "Grasp the green bell pepper and place it on the table with right gripper", + "Take out the apple and place it on the table", + "Open the lower drawer with the right gripper", "null" ], "annotations": { @@ -62565,10 +58727,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Galbot_g1_steamer_storage_baozi_j_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ 
├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_steamer_storage_baozi_j_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_take_and_put_away_garden_stuff_a_qced_hardlink/\n├── annotations/\n│ 
├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_take_and_put_away_garden_stuff_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_move_pencil_sharpener": { + "Airbot_MMK2_storage_tomato_potato": { "task_categories": [ "robotics" ], @@ -62598,11 +58760,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_pencil_sharpener", + "dataset_name": "Airbot_MMK2_storage_tomato_potato", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "education", - "level2": "school", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -62610,49 +58772,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "deli_stapler", - "level1": "stationery", - "level2": "deli_stapler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "deli_water-based_marker", - "level1": "stationery", - "level2": "deli_water-based_marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "deli_watercolor_marker_box", - "level1": "stationery", - "level2": "deli_watercolor_marker_box", + "object_name": "potato", + "level1": "vegetables", + "level2": "potato", "level3": null, "level4": null, "level5": null }, { - "object_name": "purple_incenser", - "level1": "daily_necessities", - "level2": "purple_incense", + "object_name": "tomato", + "level1": "vegetables", + "level2": "tomato", "level3": null, "level4": null, "level5": null }, { - "object_name": "pencil_sharpener", - "level1": "stationery", - "level2": "pencil_sharpener", + "object_name": "storage_box", + 
"level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -62660,27 +58798,27 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "Place the pencil sharpener on the left side of the stapler." + "pick up the potato with left hand and put it in the storage box, and pick up the tomato with right hand and put it in the storage box." ], "sub_tasks": [ { - "subtask": "Place the pencil sharpener to the left of the stapler with the left gripper", + "subtask": "Grasp the tomato with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the pencil sharpene with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place the potato into the left compartment of the storage box with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the pencil sharpene with the left gripper", + "subtask": "Grasp the potato with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the pencil sharpener to the left of the stapler with the right gripper", + "subtask": "Place the tomato into the right compartment of the storage box with the right gripper", "subtask_index": 4 }, { @@ -62690,29 +58828,31 @@ ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - 
"cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -62723,23 +58863,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 41602, + "total_episodes": 47, + "total_frames": 6005, "fps": 30, "total_tasks": 6, - "total_videos": 294, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "374.66 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "161.13 MB" }, - "frame_num": 41602, - "dataset_size": "374.66 MB", - "data_structure": "Agilex_Cobot_Magic_move_pencil_sharpener_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 6005, + "dataset_size": "161.13 MB", + "data_structure": "Airbot_MMK2_storage_tomato_potato_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:97" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -62811,10 +58951,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -62823,32 +58986,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -62857,26 +59030,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -62916,17 +59099,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -62943,10 +59126,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + 
"dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -62963,130 +59146,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -63108,9 +59231,9 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_dial_number": { + "Galaxea_R1_Lite_arrange_baai_then_brain": { "task_categories": [ "robotics" ], @@ -63140,11 +59263,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_dial_number", + "dataset_name": "Galaxea_R1_Lite_arrange_baai_then_brain", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "bedroom", + "level1": "eduction", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -63152,66 +59275,81 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "mobile_phone", - "level1": "appliances", - "level2": "mobile_phone", + "object_name": "early_education_toys", + "level1": "buiding_blocks", + "level2": "early_education_toys", "level3": null, "level4": null, - "level5": null + "level5:operation_platform_height": 77.2 } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the phone button and then put it down." + "use the gripper to find blocks with the letters b, a, a, and i on the table and arrange them into BAAI, then find r and N and turn the arranged baai into brian." 
], "sub_tasks": [ { - "subtask": "Dial the number with the left gripper", + "subtask": "Abnormal", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the second block A in the third location", "subtask_index": 1 }, { - "subtask": "Lift the phone with the right gripper", + "subtask": "Place the block I in the fourth location", "subtask_index": 2 }, { - "subtask": "Grasp the phone with the right gripper", + "subtask": "Place the first block A in the second location", "subtask_index": 3 }, { - "subtask": "Place the phone on the table with the right gripper", + "subtask": "Place the block B in the first location", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 5 + }, + { + "subtask": "Move the block A out of the second location", + "subtask_index": 6 + }, + { + "subtask": "Place the block N in the fifth location", + "subtask_index": 7 + }, + { + "subtask": "Place the block R in the second location", + "subtask_index": 8 + }, + { + "subtask": "null", + "subtask_index": 9 } ], "atomic_actions": [ "grasp", "pick", - "place", - "press" + "place" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, 
shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -63228,30 +59366,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 41296, + "total_episodes": 56, + "total_frames": 44471, "fps": 30, - "total_tasks": 6, - "total_videos": 392, + "total_tasks": 10, + "total_videos": 224, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "1.39 GB" + "dataset_size": "1.26 GB" }, - "frame_num": 41296, - "dataset_size": "1.39 GB", - "data_structure": "Airbot_MMK2_dial_number_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 44471, + "dataset_size": "1.26 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_arrange_baai_then_brain_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(44 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:97" + "train": "0:55" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -63260,8 +59398,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -63270,11 +59408,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -63283,8 +59421,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -63293,10 +59431,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -63306,7 +59444,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -63316,10 +59454,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -63329,7 +59467,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -63342,7 +59480,7 @@ "observation.state": { "dtype": 
"float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -63357,36 +59495,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -63401,30 +59517,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -63575,6 +59669,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + 
"dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -63596,112 +59750,522 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "AgiBot-g1_picks_up_battery_b": { - "path": "AgiBot-g1_picks_up_battery_b", - "dataset_name": "picks_up_battery_b", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Agilex_Cobot_Magic_open_drawer_bottom": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" + "language": [ + "en" ], - "tasks": "Place the power supply on the operating table.", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_open_drawer_bottom", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "carton", - "level1": "tool", - "level2": "carton", + "object_name": "three_layer_transparent_drawer", + "level1": "laboratory_supplies", + "level2": "three_layer_transparent_drawer", "level3": null, "level4": null, "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "open the bottom drawer." 
+ ], + "sub_tasks": [ + { + "subtask": "end", + "subtask_index": 0 }, { - "object_name": "battery", - "level1": "tool", - "level2": "battery", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pull open the drawer with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Abnormal", + "subtask_index": 2 + }, + { + "subtask": "Grab the bottom drawer with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Pull open the drawer with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Grab the bottom drawer with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], - "operation_platform_height": null, - "frame_range": "0-131840", - "dataset_size": "71.4GB", + "atomic_actions": [ + "grasp", + "pull" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], "statistics": { - "total_episodes": 329, - "total_frames": 131840, - "total_tasks": 1, - "total_videos": 2632, + "total_episodes": 50, + 
"total_frames": 18620, + "fps": 30, + "total_tasks": 7, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "182.16 MB" }, - "dataset_uuid": "69e28097-0f6e-4238-8562-b87280af0714", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the power supply on the operating table.", - "Grab and lift the power supply from the large box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "frame_num": 18620, + "dataset_size": "182.16 MB", + "data_structure": "Agilex_Cobot_Magic_open_drawer_bottom_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } }, "authors": { 
"contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": 
false, - "data_schema": "AgiBot-g1_picks_up_battery_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_picks_up_battery_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_pick_up_and_place_tub": { + "Airbot_MMK2_play_toy_piano": { "task_categories": [ "robotics" ], @@ -63731,11 +60295,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_pick_up_and_place_tub", + "dataset_name": "Airbot_MMK2_play_toy_piano", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "bathroom", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -63743,9 +60307,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "washbasin", - "level1": "home_storage", - "level2": "washbasin", + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toy_piano", + "level1": "toy", + "level2": "toy_piano", "level3": null, "level4": null, "level5": null @@ -63753,50 +60325,39 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the washbasin and put it down." + "pick up the piano in the middle with your left hand and play the piano with your right hand." 
], "sub_tasks": [ { - "subtask": "End", + "subtask": "Play the toy piano with the right gripper", "subtask_index": 0 }, { - "subtask": "Lift the basin with the left gripper", + "subtask": "Place the toy piano on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the basin with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Grasp the basin with the right gripper", + "subtask": "Grasp the toy piano with the left gripper", "subtask_index": 3 }, { - "subtask": "Place basin on the table with the right gripper", + "subtask": "Lift the toy piano with the left gripper", "subtask_index": 4 }, - { - "subtask": "Place basin on the table with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Abnormal", - "subtask_index": 6 - }, - { - "subtask": "Lift the basin with the right gripper", - "subtask_index": 7 - }, { "subtask": "null", - "subtask_index": 8 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "press" ], "robot_name": [ "Airbot_MMK2" @@ -63830,23 +60391,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 16632, + "total_episodes": 50, + "total_frames": 7683, "fps": 30, - "total_tasks": 9, - "total_videos": 392, + "total_tasks": 6, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "607.45 MB" + "dataset_size": "230.15 MB" }, - "frame_num": 16632, - "dataset_size": "607.45 MB", - "data_structure": "Airbot_MMK2_pick_up_and_place_tub_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- 
episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 7683, + "dataset_size": "230.15 MB", + "data_structure": "Airbot_MMK2_play_toy_piano_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:97" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -64200,20 +60761,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_close_the_computer": { - "path": "AIRBOT_MMK2_close_the_computer", - "dataset_name": "close_the_computer", + "Cobot_Magic_clean_up_the_tableware": { + "path": "Cobot_Magic_clean_up_the_tableware", + "dataset_name": "clean_up_the_tableware", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "place", - "pick" + "pick", + "place" ], - "tasks": "End", + "tasks": "use the left arm to grab a blue teacup", "objects": [ { "object_name": "table", @@ -64224,27 +60785,59 @@ "level5": null }, { - "object_name": "computer", - "level1": "office_supplies", - "level2": "computer", + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - 
"frame_range": "0-7754", - "dataset_size": "320.9MB", + "frame_range": "0-18127", + "dataset_size": "1.0GB", "statistics": { - "total_episodes": 49, - "total_frames": 7754, + "total_episodes": 33, + "total_frames": 18127, "total_tasks": 1, - "total_videos": 196, + "total_videos": 99, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5e9fe536-85e9-4e68-b390-51ba7aec5e6b", + "dataset_uuid": "ad865471-39ce-4343-92f4-8dcc8ed48830", "language": [ "en", "zh" @@ -64253,12 +60846,33 @@ "robotics" ], "sub_tasks": [ + "use the left arm to grab a blue teacup", + "Grasp the bowl with the left gripper", + "use the right arm to grab a blue rice bowl", + "Place the bowl on the plate with the left gripper", + "use the left arm to put the blue soup spoon into the blue teacup in the middle", + "Grasp the spoon with the left gripper", + "Place the spoon on the cup with the left gripper", + "use the right arm to put the blue soup spoon on the blue large plate in the middle", + "use the left arm to put the blue teacup into the blue rice bowl in the middle", + "Grasp the cup with the left gripper", + "use the right arm to put the blue soup spoon into the blue teacup in the middle", + "use the right arm to put the blue teacup into the blue rice bowl in the middle", + "use the left arm to grab a blue rice bowl", + "use the right arm to grab a blue teacup", + "Place the spoon on the cup with the right gripper", + "use the right arm to put the blue rice bowl into the blue large plate in the middle", + "abnormal", + "Grasp the cup with the right gripper", + "use the right arm to grab a blue soup spoon", + "Grasp the bowl with the right gripper", + "Grasp the spoon with the right gripper", + "Place the cup on the bowl with the right gripper", + "Place the cup on the bowl with the left gripper", + "use the left arm to grab a blue soup spoon", "End", - "Press the laptop with the left gripper", - "Release the laptop with the left gripper", - "Static", - "Abnormal", - "Close 
the laptop with the right gripper", + "Place the bowl on the plate with the right gripper", + "use the left arm to put the blue rice bowl into the blue large plate in the middle", "null" ], "annotations": { @@ -64296,10 +60910,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_close_the_computer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_close_the_computer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_clean_up_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_clean_up_the_tableware_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_connect_block": { + "Airbot_MMK2_take_toy_car": { "task_categories": [ "robotics" ], @@ -64329,11 +60943,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_connect_block", + "dataset_name": "Airbot_MMK2_take_toy_car", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "other", + "level2": "laboratory", "level3": null, "level4": null, "level5": null @@ -64341,1243 +60955,1267 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", + "object_name": "toy_car", + "level1": "doll", + "level2": "toy_car", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_small_plate", + "level1": "plates", + "level2": "white_small_plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "take the toy car out of the plate by hand and place it on the table." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the toy car on the plate and with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the toy car on the plate and with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Abnormal", + "subtask_index": 2 + }, + { + "subtask": "End", + "subtask_index": 3 + }, + { + "subtask": "Place the toy car on the table with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the toy car on the table with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 49, + "total_frames": 14333, + "fps": 30, + "total_tasks": 7, + "total_videos": 196, + "total_chunks": 1, + 
"chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "457.72 MB" + }, + "frame_num": 14333, + "dataset_size": "457.72 MB", + "data_structure": "Airbot_MMK2_take_toy_car_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "G1edu-u3_pick_up_the_bread_az": { + "path": "G1edu-u3_pick_up_the_bread_az", + "dataset_name": "pick_up_the_bread_az", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, + { + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "fruits", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruits", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pear", + "level1": "fruits", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + "level1": "fruit", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-14542", + "dataset_size": "169.7MB", + "statistics": { + "total_episodes": 26, + "total_frames": 14542, + "total_tasks": 1, + "total_videos": 26, + "total_chunks": 1, + "chunks_size": 27, + "fps": 30 + }, + "dataset_uuid": "95b46832-04a8-4073-b033-dbe0d38fe742", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the long bread and lift it to the center of the view with left gripper", + "Grasp the long bread and lift it to the center of the view with right gripper", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": 
"RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_storage_object_brown_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_brown_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_plate", + "level1": "plates", + "level2": "brown_plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + "level1": "beverages", + "level2": 
"coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "rulers", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "building_blocks", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "spoons_and_spatulas", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "erasers", + "level1": "stationery", + "level2": "erasers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + 
"level3": null, + "level4": null, + "level5": null + }, { "object_name": "square_building_blocks", - "level1": "toys", + "level1": "building_blocks", "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "According to the building block template in front, assemble the scattered blocks together." + "use a gripper to pick the target object and place on the brown plate." 
], "sub_tasks": [ { - "subtask": "Put the yellow build block on the right of the green build block\n", + "subtask": "Place the blue pot on the brown plate with the right gripper", "subtask_index": 0 }, { - "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask": "Grasp the blue pot with the left gripper", "subtask_index": 1 }, { - "subtask": "Put the yellow build block in the center of the table", + "subtask": "Grasp the plugboard with the left gripper", "subtask_index": 2 }, { - "subtask": "Put the blue build block on the right of the yellow build block\n", + "subtask": "Place the soft facial cleanser on the brown plate with the right gripper", "subtask_index": 3 }, { - "subtask": "Put the green build block on the red build block\n", + "subtask": "Place the back scratcher on the brown plate with the right gripper", "subtask_index": 4 }, { - "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask": "Place the blackboard erasure on the brown plate with the left gripper", "subtask_index": 5 }, { - "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask": "Grasp the potato chips with the right gripper", "subtask_index": 6 }, { - "subtask": "Put the orange build block on the right of the red build block\n", + "subtask": "Grasp the banana with the left gripper", "subtask_index": 7 }, { - "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask": "Place the coke on the brown plate with the right gripper", "subtask_index": 8 }, { - "subtask": "Put the green build block on the left of the blue build block\n", + "subtask": "Place the chocolate on the brown plate with the left gripper", "subtask_index": 9 }, { - "subtask": "Put the blue build block on the left of the yellow build block\n", + "subtask": "Place the duck toy on the brown plate with the right gripper", "subtask_index": 10 }, { - "subtask": "Put the yellow build block on 
the right of the green build block\n", + "subtask": "Grasp the compasses with the right gripper", "subtask_index": 11 }, { - "subtask": "Put the orange build block on the front of the blue build block\n", + "subtask": "Place the peach on the brown plate with the left gripper", "subtask_index": 12 }, { - "subtask": "Put the orange build block on the left of the green build block\n", + "subtask": "Grasp the duck toy with the left gripper", "subtask_index": 13 }, { - "subtask": "Put the blue build block on the right of the green build block\n", + "subtask": "Place the round wooden block on the brown plate with the right gripper", "subtask_index": 14 }, { - "subtask": "Put the green build block on the behind of the blue build block\n", + "subtask": "Grasp the blue cup with the left gripper", "subtask_index": 15 }, { - "subtask": "Put the orange build block on the right of the build block\n", + "subtask": "Place the green lemon on the brown plate with the left gripper", "subtask_index": 16 }, { - "subtask": "Put the orange build block on the right of the green build block\n", + "subtask": "Place the shower sphere on the brown plate with the left gripper", "subtask_index": 17 }, { - "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask": "Place the shower sphere on the brown plate with the right gripper", "subtask_index": 18 }, { - "subtask": "Put the red build block on the front of the yellow build block\n", + "subtask": "Grasp the back scratcher with the right gripper", "subtask_index": 19 }, { - "subtask": "Put the orange build block on the front of the yellow build block\n", + "subtask": "Grasp the square chewing gum with the left gripper", "subtask_index": 20 }, { - "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask": "Grasp the chocolate cake with the right gripper", "subtask_index": 21 }, { - "subtask": "Put the orange build block in the center of the table\n", + "subtask": "Place the 
yogurt on the brown plate with the right gripper", "subtask_index": 22 }, { - "subtask": "Put the yellow build block in the center of the table\n", + "subtask": "Place the banana on the brown plate with the left gripper", "subtask_index": 23 }, { - "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask": "Grasp the shower sphere with the left gripper", "subtask_index": 24 }, { - "subtask": "Put the orange build block on the left of the blue build block\n", + "subtask": "Place the brown towel on the brown plate with the left gripper", "subtask_index": 25 }, { - "subtask": "Put the red build block on the right of the blue build block\n", + "subtask": "Grasp the plugboard with the right gripper", "subtask_index": 26 }, { - "subtask": "Put the yellow build block in the center of the table\n", + "subtask": "Grasp the yogurt with the right gripper", "subtask_index": 27 }, { - "subtask": "Put the red build block on the behind of the blue build block\n", + "subtask": "Place the blue cup on the brown plate with the right gripper", "subtask_index": 28 }, { - "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask": "Place the plugboard on the brown plate with the right gripper", "subtask_index": 29 }, { - "subtask": "Put the orange build block on the behind of the green build block\n", + "subtask": "Grasp the brown towel with the left gripper", "subtask_index": 30 }, { - "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask": "Grasp the hard facial cleanser with the left gripper", "subtask_index": 31 }, { - "subtask": "Put the yellow build block on the front of the orange build block\n", + "subtask": "Place the duck toy on the brown plate with the left gripper", "subtask_index": 32 }, { - "subtask": "Put the blue build block on the right of the green build block\n", + "subtask": "Place the round bread on the brown plate with the right gripper", "subtask_index": 33 }, 
{ - "subtask": "Put the blue build block on the right of the red build block\n", + "subtask": "Grasp the brown towel with the right gripper", "subtask_index": 34 }, { - "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask": "Place the bread slice on the brown plate with the left gripper", "subtask_index": 35 }, { - "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask": "Grasp the hard facial cleanser with the right gripper", "subtask_index": 36 }, { - "subtask": "Put the red build block on the right of the blue build block\n", + "subtask": "Place the chocolate cake on the brown plate with the right gripper", "subtask_index": 37 }, { - "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask": "Grasp the peach with the left gripper", "subtask_index": 38 }, { - "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask": "Place the tin on the brown plate with the right gripper", "subtask_index": 39 }, { - "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask": "Place the tape on the brown plate with the right gripper", "subtask_index": 40 }, { - "subtask": "Put theyellow build block on the behind of the blue build block\n", + "subtask": "Place the blackboard erasure on the brown plate with the right gripper", "subtask_index": 41 }, { - "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask": "Grasp the bread slice with the right gripper", "subtask_index": 42 }, { - "subtask": "Put the red build block on the right of the orange build block\n", + "subtask": "Place the potato chips on the brown plate with the right gripper", "subtask_index": 43 }, { - "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask": "Grasp the potato chips with the left gripper", "subtask_index": 44 }, { - "subtask": "Put the blue build block 
on the right of the orange build block\n", + "subtask": "Place the tape on the brown plate with the left gripper", "subtask_index": 45 }, { - "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask": "Grasp the duck toy with the right gripper", "subtask_index": 46 }, { - "subtask": "Put the orange build block in the center of the table\n", + "subtask": "End", "subtask_index": 47 }, { - "subtask": "Put the green build block on the orange build block\n", + "subtask": "Grasp the blackboard erasure with the left gripper", "subtask_index": 48 }, { - "subtask": "Put the yellow build block on the right of the blue build block\n", + "subtask": "Grasp the round wooden block with the left gripper", "subtask_index": 49 }, { - "subtask": "Put the green build block on the right of the red build block\n", + "subtask": "Place the brown towel on the brown plate with the right gripper", "subtask_index": 50 }, { - "subtask": "Put the green build block on the blue build block\n", + "subtask": "Place the blue cup on the brown plate with the left gripper", "subtask_index": 51 }, { - "subtask": "Put the green build block on the right of the orange build block\n", + "subtask": "Place the compasses on the brown plate with the right gripper", "subtask_index": 52 }, { - "subtask": "Put the blue build block in the center of the table\n", + "subtask": "Grasp the compasses with the left gripper", "subtask_index": 53 }, { - "subtask": "Put the orange build block on the behind of the green build block\n", + "subtask": "Place the compasses on the brown plate with the left gripper", "subtask_index": 54 }, { - "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask": "Grasp the blue pot with the right gripper", "subtask_index": 55 }, { - "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask": "Grasp the round bread with the right gripper", "subtask_index": 56 }, { - "subtask": "Put the orange 
build block on the right of the blue build block\n", + "subtask": "Grasp the chocolate cake with the left gripper", "subtask_index": 57 }, { - "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask": "Place the potato chips on the brown plate with the left gripper", "subtask_index": 58 }, { - "subtask": "Put the orange build block in the center of the table\n", + "subtask": "Place the plugboard on the brown plate with the left gripper", "subtask_index": 59 }, { - "subtask": "Put the blue build block on the right of the yellow build block\n", + "subtask": "Place the square chewing gum on the brown plate with the right gripper", "subtask_index": 60 }, { - "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask": "Place the banana on the brown plate with the right gripper", "subtask_index": 61 }, { - "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask": "Grasp the tin with the right gripper", "subtask_index": 62 }, { - "subtask": "Put the blue build block on the front of the orange build block\n", + "subtask": "Place the hard facial cleanser on the brown plate with the left gripper", "subtask_index": 63 }, { - "subtask": "Put the green build block on the behind of the red build block\n", + "subtask": "Place the square wooden block on the brown plate with the left gripper", "subtask_index": 64 }, { - "subtask": "Put the green build block in the center of the table\n", + "subtask": "Place the square chewing gum on the brown plate with the left gripper", "subtask_index": 65 }, { - "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask": "Grasp the tape with the right gripper", "subtask_index": 66 }, { - "subtask": "Put the red build block on the right of the blue build block\n", + "subtask": "Grasp the coke with the right gripper", "subtask_index": 67 }, { - "subtask": "Put the red build block on the behind of the 
green build block\n", + "subtask": "Grasp the tape with the left gripper", "subtask_index": 68 }, { - "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask": "Grasp the square chewing gum with the right gripper", "subtask_index": 69 }, { - "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask": "Grasp the shower sphere with the right gripper", "subtask_index": 70 }, { - "subtask": "Put the blue build block in the center of the table\n", + "subtask": "Place the square wooden block on the brown plate with the right gripper", "subtask_index": 71 }, { - "subtask": "Put the red build block on the behind of the yellow build block\n", + "subtask": "Place the hard facial cleanser on the brown plate with the right gripper", "subtask_index": 72 }, { - "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask": "Grasp the bread slice with the left gripper", "subtask_index": 73 }, { - "subtask": "Put the green build block on the right of the orange build block\n", + "subtask": "Grasp the blackboard erasure with the right gripper", "subtask_index": 74 }, { - "subtask": "Put the red build block on the blue build block\n", + "subtask": "Place the bread slice on the brown plate with the right gripper", "subtask_index": 75 }, { - "subtask": "Put the orange build block on the left of the yellow build block\n", + "subtask": "Grasp the round bread with the left gripper", "subtask_index": 76 }, { - "subtask": "Put the green build block on the right of the red build block\n", + "subtask": "Grasp the chocolate with the left gripper", "subtask_index": 77 }, { - "subtask": "Put the red build block on the left of the orange build block\n", + "subtask": "Grasp the square wooden block with the left gripper", "subtask_index": 78 }, { - "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask": "Grasp the blue cup with the right gripper", "subtask_index": 
79 }, { - "subtask": "Put the red build block on the right of the orange build block\n", + "subtask": "Grasp the soft facial cleanser with the right gripper", "subtask_index": 80 }, { - "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask": "Place the chocolate cake on the brown plate with the left gripper", "subtask_index": 81 }, { - "subtask": "Put the yellow build block in the center of the table\n", + "subtask": "Grasp the square wooden block with the right gripper", "subtask_index": 82 }, { - "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask": "Grasp the green lemon with the left gripper", "subtask_index": 83 }, { - "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask": "Place the round wooden block on the brown plate with the left gripper", "subtask_index": 84 }, { - "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask": "Place the round bread on the brown plate with the left gripper", "subtask_index": 85 }, { - "subtask": "Put the yellow build block in the center of the table\n", + "subtask": "Place the blue pot on the brown plate with the left gripper", "subtask_index": 86 }, { - "subtask": "Put the green build block on the right of the blue build block\n", + "subtask": "Grasp the round wooden block with the right gripper", "subtask_index": 87 }, { - "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask": "Grasp the banana with the right gripper", "subtask_index": 88 }, - { - "subtask": "Put the blue build block on the right of the yellow build block\n", - "subtask_index": 89 - }, - { - "subtask": "Put the red build block on the right of the green build block\n", - "subtask_index": 90 - }, - { - "subtask": "Put the orange build block on the behind of the yellow build block\n", - "subtask_index": 91 - }, - { - "subtask": "Put the yellow build block on the blue 
build block\n", - "subtask_index": 92 - }, - { - "subtask": "Put the red build block on the right of the orange build block\n", - "subtask_index": 93 - }, - { - "subtask": "Put the bluebuild block on the right of the yellow build block\n", - "subtask_index": 94 - }, - { - "subtask": "Put the red build block in the center of the table\n", - "subtask_index": 95 - }, - { - "subtask": "Put the red build block on the blue build block\n", - "subtask_index": 96 - }, - { - "subtask": "Put the orange build block on the right of the blue build block\n", - "subtask_index": 97 - }, - { - "subtask": "Put the blue build block on the behind of the orange build block\n", - "subtask_index": 98 - }, - { - "subtask": "Put the yellow build block on the right of the red build block\n", - "subtask_index": 99 - }, - { - "subtask": "\nPut the yellow build block on the behind of the green build block\n", - "subtask_index": 100 - }, - { - "subtask": "Put the yellow build block on the right of the orange build block\n", - "subtask_index": 101 - }, - { - "subtask": "Put the orange build block on the behind of the blue build block\n", - "subtask_index": 102 - }, - { - "subtask": "Put the orange build block on the right of the red build block\n", - "subtask_index": 103 - }, - { - "subtask": "Put the green build block on the behind of the orange build block\n", - "subtask_index": 104 - }, - { - "subtask": "Put the blue build block on the behind of the yellow build block\n", - "subtask_index": 105 - }, - { - "subtask": "Put the yellow build block on the right of the orange build block\n", - "subtask_index": 106 - }, - { - "subtask": "Put the green build block on the behind of the yellow build block\n", - "subtask_index": 107 - }, - { - "subtask": "Put the green build block on the right of the red build block\n", - "subtask_index": 108 - }, - { - "subtask": "Put the red build block on the front of the blue build block\n", - "subtask_index": 109 - }, - { - "subtask": "Put the red build block on the 
behind of the green' build block\n", - "subtask_index": 110 - }, - { - "subtask": "Put the red build block on the behind of the orange build block\n", - "subtask_index": 111 - }, - { - "subtask": "Put the red build block on the left of the green build block\n", - "subtask_index": 112 - }, - { - "subtask": "Put the green build block on the behind of the yellow build block\n", - "subtask_index": 113 - }, - { - "subtask": "Put blue the build block on the right of the red build block\n", - "subtask_index": 114 - }, - { - "subtask": "Put the green build block on the orange build block\n", - "subtask_index": 115 - }, - { - "subtask": "Put the orange build block on the right of the red build block\n", - "subtask_index": 116 - }, - { - "subtask": "Put the blue build block on the right of the green build block\n", - "subtask_index": 117 - }, - { - "subtask": "Put the yellow build block on the right of the blue build block\n", - "subtask_index": 118 - }, - { - "subtask": "Put the orange build block on the behind of the blue build block\n", - "subtask_index": 119 - }, - { - "subtask": "\nPut the yellow build block in the center of the table", - "subtask_index": 120 - }, - { - "subtask": "Put the yellow build block on the right of the green build block\n", - "subtask_index": 121 - }, - { - "subtask": "Put the green build block on the behind of the yellow build block\n", - "subtask_index": 122 - }, - { - "subtask": "Put the blue build block in the center of the table\n", - "subtask_index": 123 - }, - { - "subtask": "Put the orange build block in the center of the table\n", - "subtask_index": 124 - }, - { - "subtask": "Put the yellow build block on the right of the green build block\n", - "subtask_index": 125 - }, - { - "subtask": "Put the red build block on the right of the yellow build block\n", - "subtask_index": 126 - }, - { - "subtask": "Put the yellow build block on the behind of the red build block\n", - "subtask_index": 127 - }, - { - "subtask": "Put the red build block 
on the behind of the green build block\n", - "subtask_index": 128 - }, - { - "subtask": "Put the green build block on the behind of the blue build block", - "subtask_index": 129 - }, - { - "subtask": "Put the green build block on the behind of the orange build block\n", - "subtask_index": 130 - }, - { - "subtask": "Put the blue build block on the right of the green build block\n", - "subtask_index": 131 - }, - { - "subtask": "Put the green build block in the center of the table\n", - "subtask_index": 132 - }, - { - "subtask": "Put the green build block on the behind of the blue build block", - "subtask_index": 133 - }, - { - "subtask": "Put the green build block on the left of the orange build block\n", - "subtask_index": 134 - }, - { - "subtask": "Put the red build block on the behind of the yellow build block\n", - "subtask_index": 135 - }, - { - "subtask": "Put the yellow build block on the right of the green build block\n", - "subtask_index": 136 - }, - { - "subtask": "Put the blue build block on the behind of the red build block\n", - "subtask_index": 137 - }, - { - "subtask": "Put the orange build block on the behind of the red build block\n", - "subtask_index": 138 - }, - { - "subtask": "Put the blue build block on the behind of the green build block\n", - "subtask_index": 139 - }, - { - "subtask": "Put the yellow build block on the blue build block\n", - "subtask_index": 140 - }, - { - "subtask": "Put the red build block on the front of the green build block\n", - "subtask_index": 141 - }, - { - "subtask": "Put the red build block on the left of the ornage build block\n", - "subtask_index": 142 - }, - { - "subtask": "Put the green build block in the center of the table\n", - "subtask_index": 143 - }, - { - "subtask": "Put the red build block on the right of the green build block\n", - "subtask_index": 144 - }, - { - "subtask": "Put the yellow build block on the right of the orange build block\n", - "subtask_index": 145 - }, - { - "subtask": "Put the yellow 
build block on the right of the blue build block\n", - "subtask_index": 146 - }, - { - "subtask": "Put the XX build block on the XX build block", - "subtask_index": 147 - }, - { - "subtask": "Put the yellow build block in the center of the table\n", - "subtask_index": 148 - }, - { - "subtask": "Put the orange build block on the green build block\n", - "subtask_index": 149 - }, - { - "subtask": "End", - "subtask_index": 150 - }, - { - "subtask": "Put the blue build block on the right of the red build block\n", - "subtask_index": 151 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 152 - }, - { - "subtask": "Put the greem build block on the behind of the blue build block\n", - "subtask_index": 153 - }, - { - "subtask": "Put the left build block on the left of the green build block\n", - "subtask_index": 154 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 155 - }, - { - "subtask": "Put the red build block on the left of the blue build block\n", - "subtask_index": 156 - }, - { - "subtask": "Put the blue build block on the right of the orange build block\n", - "subtask_index": 157 - }, - { - "subtask": "Put the yellow build block on the behind of the orange build block\n", - "subtask_index": 158 - }, - { - "subtask": "Put the green build block on the behind of the blue build block\n", - "subtask_index": 159 - }, - { - "subtask": "Put the yellow build block on the right of the orange build block\n", - "subtask_index": 160 - }, - { - "subtask": "Put the blue build block on the behind of the orange build block\n", - "subtask_index": 161 - }, - { - "subtask": "Put the red build block on the front of the green build block\n", - "subtask_index": 162 - }, - { - "subtask": "Put the red build block on the front of the yellow build block\n", - "subtask_index": 163 - }, - { - "subtask": "Put the yellow build block on the behind of the blue build block\n", - 
"subtask_index": 164 - }, - { - "subtask": "Put the blue build block on the orange build block\n", - "subtask_index": 165 - }, - { - "subtask": "Put the green build block in the center of the table\n", - "subtask_index": 166 - }, - { - "subtask": "Put the orange build block on the front of the blue build block\n", - "subtask_index": 167 - }, - { - "subtask": "Put the blue build block on the right of the yellow build block\n", - "subtask_index": 168 - }, - { - "subtask": "Put the blue build block on the left of the yellow build block\n", - "subtask_index": 169 - }, - { - "subtask": "Put the orange build block on the right of the yellow build block\n", - "subtask_index": 170 - }, - { - "subtask": "Put the orange build block on the left of the red build block\n", - "subtask_index": 171 - }, - { - "subtask": "Put the green build block in the center of the table\n", - "subtask_index": 172 - }, - { - "subtask": "Put the red build block on the right of the blue build block\n", - "subtask_index": 173 - }, - { - "subtask": "Put the blue build block on the right of the yellow build block", - "subtask_index": 174 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 175 - }, - { - "subtask": "Put the blue build block in the center of the table\n", - "subtask_index": 176 - }, - { - "subtask": "Put the green build block on the right of the red build block\n", - "subtask_index": 177 - }, - { - "subtask": "Put the green build block on the right of the blue build block\n", - "subtask_index": 178 - }, - { - "subtask": "Put the yellow build block in the center of the table\n", - "subtask_index": 179 - }, - { - "subtask": "Abnormal", - "subtask_index": 180 - }, - { - "subtask": "Put the red build block on the behind of the orange build block\n", - "subtask_index": 181 - }, - { - "subtask": "Put the red build block on the yellow build block\n", - "subtask_index": 182 - }, - { - "subtask": "Put the orange build block on the right 
of the yellow build block\n", - "subtask_index": 183 - }, - { - "subtask": "Put the red build block on the right of the yellow build block\n", - "subtask_index": 184 - }, - { - "subtask": "Put the orange build block on the right of the blue build block\n", - "subtask_index": 185 - }, - { - "subtask": "Put the orange build block on the orange build block\n", - "subtask_index": 186 - }, - { - "subtask": "Put the blue build block on the behind of the red build block\n", - "subtask_index": 187 - }, - { - "subtask": "Put the yellow build block on the right of the blue build block\n", - "subtask_index": 188 - }, - { - "subtask": "Put the yellow build block on the behind of the green build block\n", - "subtask_index": 189 - }, - { - "subtask": "Put the yellow build block on the left of the blue build block\n", - "subtask_index": 190 - }, - { - "subtask": "Put the yellow build block on the right of the orange build block\n", - "subtask_index": 191 - }, - { - "subtask": "Put the yellow build block on the blue build block\n", - "subtask_index": 192 - }, - { - "subtask": "Put the yellow build block on the behind of the blue build block\n", - "subtask_index": 193 - }, - { - "subtask": "Put the green build block on the right of the blue build block\n", - "subtask_index": 194 - }, - { - "subtask": "Put the orange build block on the right of the yellow build block\n", - "subtask_index": 195 - }, - { - "subtask": "Put the red build block on the behind of the green build block\n", - "subtask_index": 196 - }, - { - "subtask": "Put the red build block in the center of the table\n", - "subtask_index": 197 - }, - { - "subtask": "Put the yellow build block on the behind of the blue build block\n", - "subtask_index": 198 - }, - { - "subtask": "Put the green build block on the behind of the orange build block\n", - "subtask_index": 199 - }, - { - "subtask": "Put the yellow build block on the behind of the red build block\n", - "subtask_index": 200 - }, - { - "subtask": "Put the green 
build block on the right of the yellow build block\n", - "subtask_index": 201 - }, - { - "subtask": "Put the green build block on the red build block\n", - "subtask_index": 202 - }, - { - "subtask": "Put the blue build block on the right of the orange build block\n", - "subtask_index": 203 - }, - { - "subtask": "Put the red build block on the behind of the orange build block\n", - "subtask_index": 204 - }, - { - "subtask": "Put the red build block in the center of the table\n", - "subtask_index": 205 - }, - { - "subtask": "Put the red build block in the center of the table\n", - "subtask_index": 206 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 207 - }, - { - "subtask": "Put the green build block on the front of the blue build block\n", - "subtask_index": 208 - }, - { - "subtask": "Put the yellow build block on the behind of the orange build block\n", - "subtask_index": 209 - }, - { - "subtask": "Put the yellow build block on the behind of the blue build block\n", - "subtask_index": 210 - }, - { - "subtask": "Put the green build block on the blue build block\n", - "subtask_index": 211 - }, - { - "subtask": "Put the blue build block on the right of the yellow build block", - "subtask_index": 212 - }, - { - "subtask": "Put the blue build block on the right of the orange build block\n", - "subtask_index": 213 - }, - { - "subtask": "Put the orange build block on the left of the red build block\n", - "subtask_index": 214 - }, - { - "subtask": "Put the orange build block on the right of the blue build block\n", - "subtask_index": 215 - }, - { - "subtask": "Put the blue build block on the front of the green build block\n", - "subtask_index": 216 - }, - { - "subtask": "Put the yellow build block on the orange build block\n", - "subtask_index": 217 - }, - { - "subtask": "Put the yellow build block on the front of the red build block\n", - "subtask_index": 218 - }, - { - "subtask": "Put the orange build block 
on the behind of the red build block\n", - "subtask_index": 219 - }, - { - "subtask": "Put the yellow build block on the behind of the green build block\n", - "subtask_index": 220 - }, - { - "subtask": "Put the blue build block on the right of orange the build block\n", - "subtask_index": 221 - }, - { - "subtask": "Put the red build block on the right of the blue build block\n", - "subtask_index": 222 - }, - { - "subtask": "Put the green build block on the behind of the orange build block\n", - "subtask_index": 223 - }, - { - "subtask": "Put the green build block on the behind of the yellow build block\n", - "subtask_index": 224 - }, - { - "subtask": "Put the yellow build block on the right of the green build block\n", - "subtask_index": 225 - }, - { - "subtask": "Put the orange build block on the right of the green build block\n", - "subtask_index": 226 - }, - { - "subtask": "Put the orange build block on the right of the yellow build block\n", - "subtask_index": 227 - }, - { - "subtask": "Put the red build block on the green build block\n", - "subtask_index": 228 - }, - { - "subtask": "Put the red build block on the right of the blue build block\n", - "subtask_index": 229 - }, - { - "subtask": "Put the blue build block on the behind of the green build block\n", - "subtask_index": 230 - }, - { - "subtask": "Put the orange build block on the right of the red build block\n", - "subtask_index": 231 - }, - { - "subtask": "Put the blue build block on the red build block\n", - "subtask_index": 232 - }, - { - "subtask": "Put the orange build block on the right of the blue build block\n", - "subtask_index": 233 - }, - { - "subtask": "Put the orange build block on the left of the yellow build block\n", - "subtask_index": 234 - }, - { - "subtask": "Put the right build block on the right of the yellow build block\n", - "subtask_index": 235 - }, - { - "subtask": "Put the green build block on the right of the red build block\n", - "subtask_index": 236 - }, - { - "subtask": 
"Put the orange build block on the right of the green build block\n", - "subtask_index": 237 - }, - { - "subtask": "Put the blue build block in the center of the table\n", - "subtask_index": 238 - }, - { - "subtask": "Put the orange build block on the behind of the yellow build block\n", - "subtask_index": 239 - }, - { - "subtask": "Put the orange build block on the right of the blue build block\n", - "subtask_index": 240 - }, - { - "subtask": "Put the orange build block on the behind of the blue build block\n", - "subtask_index": 241 - }, - { - "subtask": "Put the red build block on the behind of the yellow build block\n", - "subtask_index": 242 - }, - { - "subtask": "Put the ornage build block on the behind of the green build block\n", - "subtask_index": 243 - }, - { - "subtask": "Put the blue build block on the left of the yellow build block\n", - "subtask_index": 244 - }, - { - "subtask": "Put the green build block on the front of the blue build block\n", - "subtask_index": 245 - }, - { - "subtask": "Put the green build block on the behind of the red build block\n", - "subtask_index": 246 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 247 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 248 - }, - { - "subtask": "Put the yellow build block on the behind of the red build block\n", - "subtask_index": 249 - }, - { - "subtask": "Put the blue build block on the green build block\n", - "subtask_index": 250 - }, - { - "subtask": "Put the blue build block on the right of the orange build block", - "subtask_index": 251 - }, - { - "subtask": "Put the orange build block on the right of the green build block\n", - "subtask_index": 252 - }, - { - "subtask": "Put the orange build block on the behind of the yellow build block\n", - "subtask_index": 253 - }, - { - "subtask": "Put the green build block in the center of the table\n", - "subtask_index": 
254 - }, - { - "subtask": "Put the blue build block on the behind of the orange build block\n", - "subtask_index": 255 - }, - { - "subtask": "Put the green build block on the left of the orange build block", - "subtask_index": 256 - }, - { - "subtask": "Put the green build block on the right of the yellow build block\n", - "subtask_index": 257 - }, - { - "subtask": "Put the green build block on the left of the blue build block\n", - "subtask_index": 258 - }, - { - "subtask": "Put the orange build block on the right of the green build block\n", - "subtask_index": 259 - }, - { - "subtask": "Put the green build block on the orange build block\n", - "subtask_index": 260 - }, - { - "subtask": "Put the blue build block on the behind of the green build block\n", - "subtask_index": 261 - }, - { - "subtask": "Put the yellow build block on the behind of the red build block\n", - "subtask_index": 262 - }, - { - "subtask": "Put the orange build block on the green build block\n", - "subtask_index": 263 - }, - { - "subtask": "Put the red build block on the behind of the blue build block\n", - "subtask_index": 264 - }, - { - "subtask": "Put the blue build block on the right of the orange build block\n", - "subtask_index": 265 - }, - { - "subtask": "move the build block to the center of the table", - "subtask_index": 266 - }, - { - "subtask": "Put the green build block on the right of the red build block\n", - "subtask_index": 267 - }, - { - "subtask": "Put the red build block on the behind of the green build block\n", - "subtask_index": 268 - }, - { - "subtask": "Put the red build block on the behind of the blue build block\n", - "subtask_index": 269 - }, - { - "subtask": "Put the yellow build block on the behind of the orange build block\n", - "subtask_index": 270 - }, - { - "subtask": "Put the red build block on the right of the orange build block\n", - "subtask_index": 271 - }, - { - "subtask": "Put the green build block on the right of the yellow build block\n", - 
"subtask_index": 272 - }, - { - "subtask": "Put the red build block on the right of the yellow build block\n", - "subtask_index": 273 - }, - { - "subtask": "Put the red build block in the center of the table\n", - "subtask_index": 274 - }, - { - "subtask": "Put the red build block on the right of the blue build block", - "subtask_index": 275 - }, - { - "subtask": "Put the yellow build block on the behind of the green build block", - "subtask_index": 276 - }, - { - "subtask": "Put the red build block on the right of the orange build block\n", - "subtask_index": 277 - }, - { - "subtask": "Put the blue build block on the right of the orange build block\n", - "subtask_index": 278 - }, - { - "subtask": "Put the orange build block on the behind of the green build block\n", - "subtask_index": 279 - }, - { - "subtask": "Put the blue build block on the front of the green build block\n", - "subtask_index": 280 - }, - { - "subtask": "Put the ornage build block on the behind of the blue build block\n", - "subtask_index": 281 - }, - { - "subtask": "Put the yellow build block on the behind of the orange build block\n", - "subtask_index": 282 - }, - { - "subtask": "Put the green build block on the behind of the blue build block\n", - "subtask_index": 283 - }, - { - "subtask": "Put the green build block on the left of the blue build block\n", - "subtask_index": 284 - }, - { - "subtask": "Put the blue build block in the center of the table\n", - "subtask_index": 285 - }, - { - "subtask": "Put the green build block on the right of the yellow build block\n", - "subtask_index": 286 - }, - { - "subtask": "Put the yellow build block on the behind of the blue build block", - "subtask_index": 287 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 288 - }, - { - "subtask": "Put the green build block on the blue build block\n", - "subtask_index": 289 - }, - { - "subtask": "Put the orange build block on the behind of the blue build 
block\n", - "subtask_index": 290 - }, - { - "subtask": "Put the red build block on the behind of the yellow build block\n", - "subtask_index": 291 - }, - { - "subtask": "Put the orange build block on the blue build block\n", - "subtask_index": 292 - }, - { - "subtask": "Put the green build block on the right of the orange build block\n", - "subtask_index": 293 - }, - { - "subtask": "Put the orange build block on the left of the red build block\n", - "subtask_index": 294 - }, - { - "subtask": "Put the green build block in the center of the table", - "subtask_index": 295 - }, { "subtask": "null", - "subtask_index": 296 + "subtask_index": 89 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_chest_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_chest_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, 
"coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -65588,30 +62226,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 238, - "total_frames": 232528, + "total_episodes": 102, + "total_frames": 16390, "fps": 30, - "total_tasks": 297, - "total_videos": 952, + "total_tasks": 90, + "total_videos": 408, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "2.20 GB" + "dataset_size": "615.40 MB" }, - "frame_num": 232528, - "dataset_size": "2.20 GB", - "data_structure": "Agilex_Cobot_Magic_connect_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(226 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_chest_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 16390, + "dataset_size": "615.40 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:237" + "train": "0:101" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -65620,8 +62258,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -65630,11 +62268,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -65643,8 +62281,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -65653,11 +62291,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -65666,8 +62304,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -65676,11 +62314,11 @@ "has_audio": false } }, - "observation.images.cam_front_chest_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -65689,8 +62327,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + 
"video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -65702,7 +62340,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -65711,32 +62349,20 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -65745,26 +62371,14 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -65804,17 +62418,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -65831,10 +62445,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - 
"dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -65851,130 +62465,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_mode_state": { + "gripper_open_scale_state": { "names": [ - "left_gripper_mode", - "right_gripper_mode" + "left_gripper_open_scale", + "right_gripper_open_scale" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "float32" }, - "gripper_mode_action": { + "gripper_open_scale_action": { "names": [ - "left_gripper_mode", - "right_gripper_mode" + "left_gripper_open_scale", + "right_gripper_open_scale" ], - "dtype": "int32", "shape": [ 2 - ] - }, - "gripper_activity_state": { + ], + "dtype": "float32" + }, + "gripper_mode_state": { "names": [ - "left_gripper_activity", - "right_gripper_activity" + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_activity_action": { + "gripper_mode_action": { "names": [ - "left_gripper_activity", - "right_gripper_activity" + "left_gripper_mode", + 
"right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_open_scale_state": { + "gripper_activity_state": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_gripper_activity", + "right_gripper_activity" ], - "dtype": "float32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_open_scale_action": { + "gripper_activity_action": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_gripper_activity", + "right_gripper_activity" ], - "dtype": "float32", "shape": [ 2 - ] + ], + "dtype": "int32" } }, "authors": { @@ -65996,9 +62610,221 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_chest_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_storage_object_yellow_basket": { + "Cobot_Magic_move_beverage": { + "path": "Cobot_Magic_move_beverage", + "dataset_name": "move_beverage", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_bottle_beverage", + "level1": "drink", + "level2": "green_bottle_beverage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_bottle_beverage", + "level1": "drink", + "level2": "red_bottle_beverage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_bottle_beverage", + "level1": "drink", + "level2": "black_bottle_beverage", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 
77.2, + "frame_range": "0-31813", + "dataset_size": "636.5MB", + "statistics": { + "total_episodes": 100, + "total_frames": 31813, + "total_tasks": 1, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "b6d0e7b1-1a5c-4188-bdca-cab2a7c613ed", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the beverage bottle placed at the back of the desk", + "Place the drink at the front of the table", + "Pick up the drink", + "Place the beverage bottle to the front of the table", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai 
Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_move_beverage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_move_beverage_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_put_the_shoes_into_the_shoe_box": { + "path": "R1_Lite_put_the_shoes_into_the_shoe_box", + "dataset_name": "put_the_shoes_into_the_shoe_box", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Put the shoes out of the shoe box", + "objects": [ + { + "object_name": "shoes", + "level1": "clothing", + "level2": "shoes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "shoe_box", + "level1": "container", + "level2": "shoe_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-87219", + "dataset_size": "3.8GB", + "statistics": { + "total_episodes": 106, + "total_frames": 87219, + "total_tasks": 1, + "total_videos": 318, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + 
"dataset_uuid": "87e16c2a-8d5b-4e0b-9706-cfae7e2b29e9", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Put the shoes out of the shoe box", + "abnormal", + "Put the shoes into the shoe box", + "Open the shoe box", + "Close the shoe box", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing 
Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── 
episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_move_mouse": { "task_categories": [ "robotics" ], @@ -66028,11 +62854,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_yellow_basket", + "dataset_name": "Agilex_Cobot_Magic_move_mouse", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "office & workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -66040,217 +62866,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "yellow_basket", - "level1": "basket", - "level2": "yellow_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruits", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_chemical_products", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", - "level3": null, - "level4": null, - "level5": 
null - }, - { - "object_name": "blue_pot", - "level1": "cookware", - "level2": "blue_pot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coke(slim_can)", - "level1": "beverages", - "level2": "coke(slim_can)", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "compass", - "level1": "stationery", - "level2": "compass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "building_blocks", - "level1": "toys", - "level2": "building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "kitchen_supplies", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "stationery", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", - 
"level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_necessities", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_building_blocks", - "level1": "building_blocks", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "stationery", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "food", - "level2": "cake", + "object_name": "mouse", + "level1": "appliances", + "level2": "mouse", "level3": null, "level4": null, "level5": null }, { - "object_name": "duck", - "level1": "toys", - "level2": "duck", + "object_name": "mouse_pad", + "level1": "appliances", + "level2": "mouse_pad", "level3": null, "level4": null, "level5": null }, { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null @@ -66258,413 +62892,63 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick the target object and place on the yellow basket." + "the right gripper organize the mouse on the mouse pad." 
], "sub_tasks": [ { - "subtask": "Grasp the blue pot with the left gripper", + "subtask": "Grasp the mouse with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the plugboard with the left gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "Place the round wooden block on the yellow basket with the left gripper", + "subtask": "Grasp the mouse with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the potato chips with the right gripper", + "subtask": "Place the mouse on the mouse mat with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the tin on the yellow basket with the right gripper", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Place the plugboard on the yellow basket with the left gripper", + "subtask": "Place the mouse on the mouse mat with the left gripper", "subtask_index": 5 }, - { - "subtask": "Place the peach on the yellow basket with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "Place the blue cup on the yellow basket with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the brown towel on the yellow basket with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the compasses with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the green lemon on the yellow basket with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Place the duck toy on the yellow basket with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 12 - }, - { - "subtask": "Place the blackboard erasure on the yellow basket with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Grasp the blue cup with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Place the bread slice on the yellow basket with the left gripper", - "subtask_index": 15 - }, - { - "subtask": "Place the hard facial cleanser on the yellow basket with 
the right gripper", - "subtask_index": 16 - }, - { - "subtask": "Place the peach on the yellow basket with the right gripper", - "subtask_index": 17 - }, - { - "subtask": "Grasp the back scratcher with the right gripper", - "subtask_index": 18 - }, - { - "subtask": "Place the tape on the yellow basket with the right gripper", - "subtask_index": 19 - }, - { - "subtask": "Place the blue pot on the yellow basket with the right gripper", - "subtask_index": 20 - }, - { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 21 - }, - { - "subtask": "Place the tape on the yellow basket with the left gripper", - "subtask_index": 22 - }, - { - "subtask": "Place the shower sphere on the yellow basket with the left gripper", - "subtask_index": 23 - }, - { - "subtask": "Place the yogurt on the yellow basket with the left gripper", - "subtask_index": 24 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Grasp the plugboard with the right gripper", - "subtask_index": 26 - }, - { - "subtask": "Grasp the tin with the left gripper", - "subtask_index": 27 - }, - { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 28 - }, - { - "subtask": "Place the compasses on the yellow basket with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Place the hard facial cleanser on the yellow basket with the left gripper", - "subtask_index": 30 - }, - { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 31 - }, - { - "subtask": "Place the potato chips on the yellow basket with the left gripper", - "subtask_index": 32 - }, - { - "subtask": "Grasp the chocolate with the right gripper", - "subtask_index": 33 - }, - { - "subtask": "Place the banana on the yellow basket with the right gripper", - "subtask_index": 34 - }, - { - "subtask": "Grasp the peach with the right gripper", - "subtask_index": 35 - }, - { - "subtask": "Grasp the 
brown towel with the right gripper", - "subtask_index": 36 - }, - { - "subtask": "Place the chocolate cake on the yellow basket with the left gripper", - "subtask_index": 37 - }, - { - "subtask": "Place the shower sphere on the yellow basket with the right gripper", - "subtask_index": 38 - }, - { - "subtask": "Grasp the hard facial cleanser with the right gripper", - "subtask_index": 39 - }, - { - "subtask": "Place the back scratcher on the yellow basket with the right gripper", - "subtask_index": 40 - }, - { - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 41 - }, - { - "subtask": "Place the blue cup on the yellow basket with the left gripper", - "subtask_index": 42 - }, - { - "subtask": "Grasp the green lemon with the right gripper", - "subtask_index": 43 - }, - { - "subtask": "Place the soft facial cleanser on the yellow basket with the right gripper", - "subtask_index": 44 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 45 - }, - { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 46 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 47 - }, - { - "subtask": "End", - "subtask_index": 48 - }, - { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 49 - }, - { - "subtask": "Grasp the coke with the left gripper", - "subtask_index": 50 - }, - { - "subtask": "Place the blackboard erasure on the yellow basket with the right gripper", - "subtask_index": 51 - }, - { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 52 - }, - { - "subtask": "Place the tin on the yellow basket with the left gripper", - "subtask_index": 53 - }, - { - "subtask": "Place the bread slice on the yellow basket with the right gripper", - "subtask_index": 54 - }, - { - "subtask": "Place the compasses on the yellow basket with the left gripper", - "subtask_index": 55 - }, - { - "subtask": "Place the 
square wooden block on the yellow basket with the left gripper", - "subtask_index": 56 - }, - { - "subtask": "Place the chocolate cake on the yellow basket with the right gripper", - "subtask_index": 57 - }, - { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 58 - }, - { - "subtask": "Place the peach doll on the yellow basket with the left gripper", - "subtask_index": 59 - }, - { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 60 - }, - { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 61 - }, - { - "subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 62 - }, - { - "subtask": "Place the coke on the yellow basket with the left gripper", - "subtask_index": 63 - }, - { - "subtask": "Place the duck toy on the yellow basket with the left gripper", - "subtask_index": 64 - }, - { - "subtask": "Grasp the tin with the right gripper", - "subtask_index": 65 - }, - { - "subtask": "Place the round wooden block on the yellow basket with the right gripper", - "subtask_index": 66 - }, - { - "subtask": "Place the square wooden block on the yellow basket with the right gripper", - "subtask_index": 67 - }, - { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 68 - }, - { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 69 - }, - { - "subtask": "Place the round bread on the yellow basket with the right gripper", - "subtask_index": 70 - }, - { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 71 - }, - { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 72 - }, - { - "subtask": "Grasp the peach doll with the left gripper", - "subtask_index": 73 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 74 - }, - { - "subtask": "Place the brown towel on the yellow basket with the left gripper", - "subtask_index": 75 - }, - { - 
"subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 76 - }, - { - "subtask": "Grasp the yogurt with the left gripper", - "subtask_index": 77 - }, - { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 78 - }, - { - "subtask": "Place the coke on the yellow basket with the right gripper", - "subtask_index": 79 - }, - { - "subtask": "Place the chocolate on the yellow basket with the right gripper", - "subtask_index": 80 - }, - { - "subtask": "Place the potato chips on the yellow basket with the right gripper", - "subtask_index": 81 - }, - { - "subtask": "Place the plugboard on the yellow basket with the right gripper", - "subtask_index": 82 - }, - { - "subtask": "Place the blue pot on the yellow basket with the left gripper", - "subtask_index": 83 - }, - { - "subtask": "Place the round bread on the yellow basket with the left gripper", - "subtask_index": 84 - }, - { - "subtask": "Grasp the round bread with the left gripper", - "subtask_index": 85 - }, - { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 86 - }, - { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 87 - }, - { - "subtask": "Grasp the soft facial cleanser with the right gripper", - "subtask_index": 88 - }, - { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 89 - }, - { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 90 - }, - { - "subtask": "Place the square chewing gum on the yellow basket with the right gripper", - "subtask_index": 91 - }, - { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 92 - }, { "subtask": "null", - "subtask_index": 93 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower" ], "robot_name": [ - "Galaxea_R1_Lite" + "Agilex_Cobot_Magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, 
this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -66675,53 +62959,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 102, - "total_frames": 18153, + "total_episodes": 112, + "total_frames": 49737, "fps": 30, - "total_tasks": 94, - "total_videos": 408, + "total_tasks": 7, + "total_videos": 336, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "742.42 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "386.89 MB" }, - "frame_num": 18153, - "dataset_size": "742.42 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_yellow_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- 
eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 49737, + "dataset_size": "386.89 MB", + "data_structure": "Agilex_Cobot_Magic_move_mouse_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(100 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:101" + "train": "0:111" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -66730,8 +62991,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -66743,8 +63004,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -66753,8 +63014,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -66766,8 +63027,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -66776,8 +63037,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -66789,7 +63050,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -66798,20 +63059,32 @@ 
"left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -66820,14 +63093,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -66979,26 +63264,6 @@ ], "dtype": "int32" }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, "gripper_mode_state": { "names": [ "left_gripper_mode", @@ -67038,6 +63303,26 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": 
"float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -67059,17 +63344,231 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_prepare_tea": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" + "G1edu-u3_put_the_tissue_box_al": { + "path": "G1edu-u3_put_the_tissue_box_al", + "dataset_name": "put_the_tissue_box_al", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" ], - "tags": [ - "RoboCOIN", + "scene_type": [], + "atomic_actions": [ + "grasp", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tissue", + "level1": "daily_necessities", + "level2": "tissue", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-10816", + "dataset_size": "142.3MB", + "statistics": { + "total_episodes": 39, + "total_frames": 10816, + "total_tasks": 1, + "total_videos": 39, + "total_chunks": 1, + "chunks_size": 41, + "fps": 30 + }, + "dataset_uuid": "39aeb43d-8473-408d-a61d-bdd24134ab9a", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Place the tissue box on the table with both gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + 
"scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_put_the_tissue_box_al_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_put_the_tissue_box_al_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_box_storage_parcel_i": { + "path": "leju_robot_box_storage_parcel_i", + "dataset_name": "box_storage_parcel_i", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Pick up the bottle from the table.", + "objects": [ + { + "object_name": 
"table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parcel", + "level1": "container", + "level2": "parcel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-313461", + "dataset_size": "19.3GB", + "statistics": { + "total_episodes": 222, + "total_frames": 313461, + "total_tasks": 1, + "total_videos": 666, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "6e0f6aaf-d447-436f-bfc3-bedd5a1bce04", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick up the bottle from the table.", + "Flip the bottle to face forward.", + "Take the bottle from the table.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic 
Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_box_storage_parcel_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_storage_cake_pan": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", "LeRobot" ], "license": "apache-2.0", @@ -67091,11 +63590,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_prepare_tea", + "dataset_name": "Airbot_MMK2_storage_cake_pan", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { 
"level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -67103,33 +63602,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "small_teapot", - "level1": "teacus", - "level2": "small_teapot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "inner_pot_of_the_teapot", + "object_name": "pan", "level1": "kitchen_supplies", - "level2": "inner_pot_of_the_teapot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tea_canister", - "level1": "tea_bags", - "level2": "tea_canister", + "level2": "pan", "level3": null, "level4": null, "level5": null }, { - "object_name": "tea", - "level1": "tea_bags", - "level2": "tea", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null @@ -67137,40 +63620,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "with the right hand, take out the tea leaves from the tea canister and put them into the inner pot of the teapot. with the left hand, put the inner pot of the teapot into the teapot and then close the lid of the teapot." + "put the pot down with left hand and place the cake in it with right hand." 
], "sub_tasks": [ { - "subtask": "Place the tea leaves into the tea strainer with the right gripper", + "subtask": "Grasp the frying pan with the left gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the bread with the right gripper", "subtask_index": 1 }, { - "subtask": "Close the teapot lid with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Grasp the tea strainer with the left gripper", + "subtask": "Place the bread on the frying pan with the right gripper", "subtask_index": 3 }, { - "subtask": "Abnormal", + "subtask": "Place the frying pan on the table with the left gripper", "subtask_index": 4 }, - { - "subtask": "Place the tea strainer into the teapot with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the tea leaves with the right gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 5 } ], "atomic_actions": [ @@ -67210,23 +63685,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 89, - "total_frames": 70954, + "total_episodes": 48, + "total_frames": 8832, "fps": 30, - "total_tasks": 8, - "total_videos": 356, + "total_tasks": 6, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "2.99 GB" + "dataset_size": "250.91 MB" }, - "frame_num": 70954, - "dataset_size": "2.99 GB", - "data_structure": "Airbot_MMK2_prepare_tea_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- 
episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (77 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 8832, + "dataset_size": "250.91 MB", + "data_structure": "Airbot_MMK2_storage_cake_pan_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:88" + "train": "0:47" }, "features": { "observation.images.cam_head_rgb": { @@ -67580,1351 +64055,164 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "RMC-AIDA-L_organise_the_document_bag": { - "path": "RMC-AIDA-L_organise_the_document_bag", - "dataset_name": "organise_the_document_bag", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick", - "pull" - ], - "tasks": "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "document_bag", - "level1": "container", - "level2": "document_bag", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-225360", - "dataset_size": "2.0GB", - "statistics": { - "total_episodes": 480, - "total_frames": 225360, - "total_tasks": 4, - "total_videos": 1440, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "327e0fe7-1be9-4724-b1ae-f511d8ab7f6b", - "language": [ - "en", - "zh" - ], + "Galaxea_R1_Lite_mix_blue_yellow_large_test_tube": { "task_categories": [ "robotics" ], - "sub_tasks": [ - "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", - "Pulling up the zipper on the transparent plastic bag with the left gripper to close it", - "Pick up the 
transparent plastic bag with the left gripper", - "Pick up the transparent plastic bag with the right gripper", - "Place the transparent plastic bag", - "Place the transparent plastic bag with the right gripper", - "End", - "null" + "language": [ + "en" ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian 
Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_organise_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_organise_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ 
├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_put_in_the_pear": { - "path": "Cobot_Magic_put_in_the_pear", - "dataset_name": "put_in_the_pear", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the high-fiber fruit", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, + "license": "apache-2.0", + "configs": [ { - "object_name": "pear", - "level1": "fruit", - "level2": "pear", - "level3": null, - "level4": null, - "level5": null + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" } ], - "operation_platform_height": 77.2, - "frame_range": "0-21690", - "dataset_size": "461.5MB", - "statistics": { - "total_episodes": 97, - "total_frames": 21690, - "total_tasks": 1, - "total_videos": 291, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "9ef9a074-b4b1-4d16-a577-460381b81a91", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the high-fiber fruit", - "abnormal", - "Place it on the right side of the table", - "null" - ], - "annotations": { - 
"subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_mix_blue_yellow_large_test_tube", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, 
Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_put_in_the_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n 
├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_put_in_the_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Galbot_g1_steamer_storage_baozi_e": { - "path": "Galbot_g1_steamer_storage_baozi_e", - "dataset_name": "steamer_storage_baozi_e", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the pot lid with left gripper", + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "baozi", - "level1": "food", - "level2": "baozi", + "object_name": "blue_pigment", + "level1": "materials", + "level2": 
"blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "steamer", - "level1": "cookware", - "level2": "steamer", + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "pot_lid", - "level1": "daily_necessities", - "level2": "pot_lid", + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-627792", - "dataset_size": "11.2GB", - "statistics": { - "total_episodes": 607, - "total_frames": 627792, - "total_tasks": 1, - "total_videos": 1821, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "c1e9fbc2-7775-4cc2-9a8c-d4c58aca210a", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the pot lid with left gripper", - "Abnormal", - "Place the pot lid on the steamer with left gripper", - "End", - "Place the baozi on the steamer with right gripper", - "Grasp the baozi in the plate with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - 
"repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Galbot_g1_steamer_storage_baozi_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_steamer_storage_baozi_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_pull_open_bag": { - "path": "RMC-AIDA-L_pull_open_bag", - "dataset_name": "pull_open_bag", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], 
- "atomic_actions": [ - "grasp", - "pull", - "zip", - "up" - ], - "tasks": "Grab the zipper with your left hand.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", "level3": null, "level4": null, "level5": null }, { - "object_name": "food_bag", - "level1": "container", - "level2": "food_bag", + "object_name": "test_tubes", + "level1": "laboratory_supplies", + "level2": "test_tubes", "level3": null, "level4": null, "level5": null }, { - "object_name": "zipper", - "level1": "fastener", - "level2": "zipper", + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-684327", - "dataset_size": "8.3GB", - "statistics": { - "total_episodes": 1396, - "total_frames": 684327, - "total_tasks": 5, - "total_videos": 4188, - "total_chunks": 2, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "4363efec-236f-4495-a86b-14c3a5d20345", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the test tube with yellow pigment and the test tube with blue pigment by grippers and pour them into the beaker." 
], "sub_tasks": [ - "Grab the zipper with your left hand.", - "Abnormal", - "Place the bag on the table with left gripper", - "Pull open the zipper with your right hand.", - "Grab the bag with your left hand.", - "Static", - "Unzip the zipper of bag with right gripper", - "End", - "Pick up the bag with left gripper", - "Pick up the bag with right gripper", - "Deliver the bag from right gripper to left gripper", - "Pull up the zipper with your right hand.", - "Discard.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing 
Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_pull_open_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_pull_open_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n 
│ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" - }, - "Airbot_MMK2_storage_peach_pear": { - "path": "Airbot_MMK2_storage_peach_pear", - "dataset_name": "Airbot_MMK2_storage_peach_pear", - "robot_type": "", - "end_effector_type": [ - "five_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp a peach with the left gripper", - "objects": [ { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the yellow reagent with the right gripper", + "subtask_index": 0 }, { - "object_name": "pear", - "level1": "fruits", - "level2": "pear", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pour the yellow reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 1 }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-2804", - "dataset_size": "114.2MB", - "statistics": { - "total_episodes": 12, - "total_frames": 2804, - "total_tasks": 1, - "total_videos": 48, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "c48b833c-02bd-4fd5-a2c2-5b03bf2c2936", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp a 
peach with the left gripper", - "Grasp a pear with the right gripper", - "Abnormal", - "Place the peach into the left compartment of the storage box with the left gripper", - "Place the pear into the right compartment of the storage box with the right gripper", - "End", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, 
Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Airbot_MMK2_storage_peach_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Airbot_MMK2_storage_peach_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_plate_storage_doll": { - "path": "G1edu-u3_plate_storage_doll", - "dataset_name": "plate_storage_doll", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Static", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", + "subtask_index": 2 }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pour the blue reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 3 }, { - "object_name": 
"doll", - "level1": "toys", - "level2": "doll", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-133026", - "dataset_size": "3.7GB", - "statistics": { - "total_episodes": 388, - "total_frames": 133026, - "total_tasks": 2, - "total_videos": 1164, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "45f1baec-34a9-4f8d-bc50-3ef2086cf1a9", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Static", - "Place the pink doll into the plate with the left gripper", - "Grasp the pink doll with the right gripper", - "End", - "Place the pink doll into the plate with the right gripper", - "Grasp the pink doll with the left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, 
Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_plate_storage_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_plate_storage_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_take_and_put_the_bowl": { - "path": "R1_Lite_take_and_put_the_bowl", - "dataset_name": "take_and_put_the_bowl", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place it on the table", - "objects": [ + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask_index": 4 + }, { - "object_name": "cabinet", - "level1": "furniture", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null + "subtask": "End", + "subtask_index": 5 }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null + "subtask": 
"Place the test tube into the paper cup with the right gripper", + "subtask_index": 6 }, { - "object_name": "cabinet_door", - "level1": "furniture", - "level2": "cabinet_door", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the test tube into the paper cup with the left gripper", + "subtask_index": 7 }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the blue reagent with the left gripper", + "subtask_index": 8 }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "subtask": "null", + "subtask_index": 9 } ], - "operation_platform_height": null, - "frame_range": "0-189200", - "dataset_size": "7.0GB", - "statistics": { - "total_episodes": 94, - "total_frames": 189200, - "total_tasks": 1, - "total_videos": 282, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "29eb76ea-0c49-4f11-8073-7f7501b77736", - "language": [ - "en", - "zh" + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" ], - "task_categories": [ - "robotics" + "robot_name": [ + "Galaxea_R1_Lite" ], - "sub_tasks": [ - "Place it on the table", - "Put it back into the cabinet", - "Take the plate from the cabinet", - "Pick up the plate from the table", - "Close the cabinet door", - "Pick up the bowl from the table", - "Take the bowl from the cabinet", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_take_and_put_the_bowl_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_take_and_put_the_bowl_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - 
"AIRBOT_MMK2_chop_the_scallions": { - "path": "AIRBOT_MMK2_chop_the_scallions", - "dataset_name": "chop_the_scallions", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "cut" - ], - "tasks": "Grasp the kitchen knife with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "knife", - "level1": "food", - "level2": "green_onion", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-14308", - "dataset_size": "573.9MB", - "statistics": { - "total_episodes": 50, - "total_frames": 14308, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "5beeb056-0ab1-4611-8791-0f80b7dc82d2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the kitchen knife with the right gripper", - "Press the scallion with the left gripper", - "Place the kitchen knife back on the knife holder with the right gripper", - "Static", - "End", - "Abnormal", - "Cut scallions with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - 
"repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_chop_the_scallions_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ 
└── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_chop_the_scallions_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_box_storage_parcel_f": { - "path": "leju_robot_box_storage_parcel_f", - "dataset_name": "box_storage_parcel_f", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the package into the parcel locker.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parcel", - "level1": "container", - "level2": "parcel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-185943", - "dataset_size": "9.5GB", - "statistics": { - "total_episodes": 497, - "total_frames": 185943, - "total_tasks": 1, - "total_videos": 1491, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "52fd6211-f929-4608-b45d-501557f97fc8", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the package into the parcel locker.", - "Pick up the package from the inbound machine.", - "Pick up the package from the conveyor belt.", - "Place the package onto the inbound machine.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": 
"auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_place_plastic_bowl_ag": { - "path": "G1edu-u3_place_plastic_bowl_ag", - "dataset_name": "place_plastic_bowl_ag", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick", - "place" - ], - "tasks": "Place the plastic bowl on the table with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_bowl", - "level1": "container", - "level2": "plastic_bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-7219", - "dataset_size": "116.0MB", - "statistics": { - "total_episodes": 38, - "total_frames": 7219, - "total_tasks": 1, - "total_videos": 38, - "total_chunks": 1, - "chunks_size": 39, - "fps": 30 - }, - "dataset_uuid": "200d19c3-55a3-44fd-830a-10c1b2f63ed7", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the plastic bowl on the table with right gripper", - "End", - "Place the plastic bowl on the table with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": 
"RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_place_plastic_bowl_ag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_place_plastic_bowl_ag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_put_the_pillow_on_the_bed": { - "path": "R1_Lite_put_the_pillow_on_the_bed", - "dataset_name": "put_the_pillow_on_the_bed", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pick up the pillow", - "objects": [ - { - "object_name": "pillow", - "level1": "daily_necessities", - "level2": "pillow", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bed", - "level1": "furniture", - "level2": "bed", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-53484", - "dataset_size": "1.7GB", - "statistics": { - "total_episodes": 48, - "total_frames": 53484, - "total_tasks": 1, - "total_videos": 144, - "total_chunks": 1, - "chunks_size": 
1000, - "fps": 30 - }, - "dataset_uuid": "c25eff25-52c9-4178-a906-2f1040e89068", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the pillow", - "Place the pillow at the end of the bed", - "Move to the head of the bed", - "Move to the foot of the bed", - "Place the pillow at the bedside", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen 
Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_put_the_pillow_on_the_bed_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_put_the_pillow_on_the_bed_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_stack_block": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_stack_block", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "bedroom", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block_pillar", - "level1": "toys", - "level2": "block_pillar", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "place the square building blocks in the center with left hand and put the cylindrical building blocks on top with right hand." 
- ], - "sub_tasks": [ - { - "subtask": "Grasp the blue build blocks with the left gripper", - "subtask_index": 0 - }, - { - "subtask": "Grasp the green build blocks with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Place the blue build blocks on the Mini table with the left gripper", - "subtask_index": 2 - }, - { - "subtask": "Place the green build blocks on the blue build block with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Grasp the blue diamond shaped build blocks with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Grasp the red build blocks with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "Place the blue diamond shaped build blocks on the glasses case with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Place the purple build blocks on the red build block with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the green build blocks on the center of the table with the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the green build blocks with the left gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the glasses case on the red and green build blocks with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the purple build blocks with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the blue build blocks on the yellow build block with the left gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the glasses case with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Place the green cylindrical build blocks on the green build blocks with the right gripper", - "subtask_index": 14 - }, - { - "subtask": "End", - "subtask_index": 15 - }, - { - "subtask": "Place the blue build blocks on the pink build block with the left gripper", - "subtask_index": 16 - }, - { - "subtask": "Place the red build blocks on the blue build block with the right gripper", - "subtask_index": 
17 - }, - { - "subtask": "Grasp the green cylindrical build blocks with the right gripper", - "subtask_index": 18 - }, - { - "subtask": "null", - "subtask_index": 19 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -68941,30 +64229,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 236, - "total_frames": 60925, + "total_episodes": 51, + "total_frames": 29341, "fps": 30, - "total_tasks": 20, - "total_videos": 944, + "total_tasks": 10, + "total_videos": 204, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "2.00 GB" + 
"dataset_size": "1.59 GB" }, - "frame_num": 60925, - "dataset_size": "2.00 GB", - "data_structure": "Airbot_MMK2_stack_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (224 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 29341, + "dataset_size": "1.59 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_large_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(39 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:235" + "train": "0:50" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -68973,8 +64261,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -68983,11 +64271,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -68996,8 +64284,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -69006,10 +64294,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -69019,7 +64307,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -69029,10 +64317,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -69042,7 +64330,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -69055,7 +64343,7 @@ "observation.state": { "dtype": 
"float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -69070,36 +64358,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -69114,30 +64380,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -69288,6 +64532,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + 
"dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -69309,9 +64613,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_diamond_ring": { + "Airbot_MMK2_storage_shark_doll": { "task_categories": [ "robotics" ], @@ -69341,11 +64645,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_diamond_ring", + "dataset_name": "Airbot_MMK2_storage_shark_doll", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "bedroom", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -69353,17 +64657,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "diamond_ring", - "level1": "daily_necessities", - "level2": "diamond_ring", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, 
"level4": null, "level5": null }, { - "object_name": "flip_top_paper_boxes", - "level1": "packaging", - "level2": "flip_top_paper_boxes", + "object_name": "shark_doll", + "level1": "toys", + "level2": "shark_doll", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "laboratory_supplies", + "level2": "lid", "level3": null, "level4": null, "level5": null @@ -69371,23 +64683,23 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the ring into the box." + "pick up the shark doll and put it in the lid." ], "sub_tasks": [ { - "subtask": "Place the diamond ring in the box with the right gripper", + "subtask": "Place the whale on the white lid with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the whale with the left gripper", "subtask_index": 1 }, { - "subtask": "Close the lid of the box with the left gripper", + "subtask": "Deliver the whale from left gripper to right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the diamond ring with the right gripper", + "subtask": "End", "subtask_index": 3 }, { @@ -69398,8 +64710,7 @@ "atomic_actions": [ "grasp", "place", - "pick", - "turn" + "pick" ], "robot_name": [ "Airbot_MMK2" @@ -69433,23 +64744,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 9131, + "total_episodes": 93, + "total_frames": 13034, "fps": 30, "total_tasks": 5, - "total_videos": 200, + "total_videos": 372, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "426.97 MB" + "dataset_size": "461.32 MB" }, - "frame_num": 9131, - "dataset_size": "426.97 MB", - "data_structure": "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 13034, + "dataset_size": "461.32 MB", + "data_structure": "Airbot_MMK2_storage_shark_doll_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:92" }, "features": { "observation.images.cam_head_rgb": { @@ -69803,513 +65114,343 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_block_BBs": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" + "Cobot_Magic_storage_plate": { + "path": "Cobot_Magic_storage_plate", + "dataset_name": "storage_plate", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_block_BBs", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "tasks": "Left grab the white plate.", "objects": [ { - "object_name": null, - "level1": "bb_pellets", - "level2": "ball", - "level3": "bb_pellets", + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, "level4": null, "level5": null }, { - "object_name": null, - "level1": "bowl", - "level2": "bowl", - "level3": "bowl", + "object_name": "rack", + "level1": "furniture", + "level2": "rack", + "level3": null, "level4": null, "level5": null }, { - "object_name": null, - "level1": "building_blocks", - "level2": "rectangular_building_blocks", - "level3": "building_blocks", + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "put the bb pellets and rectangular building blocks into the bowl." 
- ], - "sub_tasks": [ - { - "subtask": "Place the green cuboid block into the bowl with the right gripper", - "subtask_index": 0 - }, - { - "subtask": "Grasp the green cuboid block with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Grasp the bullet with the left gripper", - "subtask_index": 2 - }, - { - "subtask": "Place the bullet into the bowl with the left gripper", - "subtask_index": 3 - }, - { - "subtask": "End", - "subtask_index": 4 }, { - "subtask": "null", - "subtask_index": 5 + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "pick", - "clip", - "place", - "lift" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], + "operation_platform_height": 77.2, + "frame_range": "0-57019", + "dataset_size": "903.4MB", 
"statistics": { - "total_episodes": 48, - "total_frames": 8640, - "fps": 30, - "total_tasks": 6, - "total_videos": 192, + "total_episodes": 84, + "total_frames": 57019, + "total_tasks": 1, + "total_videos": 252, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "282.66 MB" - }, - "frame_num": 8640, - "dataset_size": "282.66 MB", - "data_structure": "Airbot_MMK2_storage_block_BBs_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:47" + "fps": 50 }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "dataset_uuid": "9f0b0953-fa62-4d10-8b2a-17b1ff44e9cf", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Left grab the white plate.", + "Left grab the gray plate.", + "Place the plate on the rack", + "Right place it at the front of the shelf.", + "Right receive the plate.", + "Pick up the plate from the table", + "Left lift the plate.", + "end", + "Left grab the blue plate.", + "Left grab the yellow plate.", + "Hand over the plate", + "Right place it at the back of the shelf.", + "Left grab the pink plate.", + "Left grab the green plate.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 
480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_storage_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_storage_plate_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ 
├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Tianqin_A2_place_the_paper_box": { + "path": "Tianqin_A2_place_the_paper_box", + "dataset_name": "place_the_paper_box", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-94841", + "dataset_size": "3.0GB", + "statistics": { + "total_episodes": 236, + "total_frames": 94841, + "total_tasks": 1, + "total_videos": 708, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "291007f6-cfd0-4e23-bc97-da01c0f367ac", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Abnormal", + "Grasp the paper box", + "Place the paper box on the table", + "Place the data cable in the another box", + "Grasp the data cable", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", 
+ "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Tianqin_A2_place_the_paper_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Tianqin_A2_place_the_paper_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── 
chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Split_aloha_wipe_the_table": { + "path": "Split_aloha_wipe_the_table", + "dataset_name": "wipe_the_table", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick", + "wipe" + ], + "tasks": "Stand the paper cup upright", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - 
"right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - 
"right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "rag", + "level1": "clothing", + "level2": "rag", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "stain", + "level1": "garbage", + "level2": "stain", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-166266", + "dataset_size": "1.3GB", + "statistics": { + "total_episodes": 302, + "total_frames": 166266, + "total_tasks": 3, + "total_videos": 906, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c5648c51-14f0-4d84-a3cc-2fd1c28069a8", + "language": [ + "en", + 
"zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Stand the paper cup upright", + "Wipe the stains off the table with a rag", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, 
Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, 
Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Split_aloha_wipe_the_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_wipe_the_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "RMC-AIDA-L_stir_coffee": { - "path": "RMC-AIDA-L_stir_coffee", - "dataset_name": "stir_coffee", + "RMC-AIDA-L_food_packaging": { + "path": "RMC-AIDA-L_food_packaging", + "dataset_name": "food_packaging", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -70317,12 +65458,11 @@ "scene_type": [], "atomic_actions": [ "grasp", - "take", - "put", - "move", - "stir" + "pick", + "place", + "pull" ], - "tasks": "Stir the coffee in the cup with the right gripper", + "tasks": "Grab the pear with your right hand.", "objects": [ { "object_name": "table", @@ -70333,43 +65473,67 @@ "level5": null }, { - "object_name": "cup", - "level1": "tableware", - "level2": "cup", + "object_name": "bag", + "level1": "container", + "level2": "bag", "level3": null, "level4": null, "level5": null }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "lunch_box", + "level1": "container", + "level2": "lunch_box", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "cucumber", + "level1": "vegetable", + "level2": "cucumber", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "pear", + "level1": "fruit", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-555807", - "dataset_size": "3.6GB", + "frame_range": "0-817652", + "dataset_size": "9.4GB", "statistics": { - "total_episodes": 767, - "total_frames": 555807, - "total_tasks": 8, - "total_videos": 2301, + "total_episodes": 497, + "total_frames": 817652, + "total_tasks": 2, + "total_videos": 1491, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "ee676d19-f3d4-4c11-bb83-9a2474cf77c5", + "dataset_uuid": "59eb8786-4e0e-4c5e-b129-aec36ec1eda9", "language": [ "en", "zh" @@ -70378,21 +65542,21 @@ "robotics" ], "sub_tasks": [ - "Stir the coffee in the cup with the right gripper", - "Stir the coffee with right gripper", - "Abnormal", - "Move the cup with coffee to the center of view with the right gripper", - "Static", - "Move the cup in the center of the table with right gripper", - "Grasp the spoon with left gripper", - "Move the cup with coffee to the center of view with the left gripper", - "Stir the coffee in the cup with the left gripper", - "Grasp the spoon with right gripper", - "End", - "Stir the coffee with left gripper", - "Move the cup in the center of the table with left gripper", - "Pick up the spoon with the left gripper", - "Pick up the spoon with the right gripper", + "Grab the pear with your right hand.", + "Pick up the lunch box with your right hand.", + "end", + "Put the cucumber into the lunch bag with your right hand.", + "Hold the lunch bag with your left hand.", + "Secure the lunch bag with your left hand.", + "Pick up the cucumber with your right hand.", + "Hold the lunch bag with your right hand.", + 
"Take the banana with your right hand.", + "Place the lunch box into the lunch bag with your right hand.", + "Place the peach into the lunch bag with your right hand.", + "Zip up the lunch bag with your right hand.", + "Put the banana into the lunch bag with your right hand.", + "Place the pear into the lunch bag with your right hand.", + "Grab the peach with your right hand.", "null" ], "annotations": { @@ -70430,10 +65594,113 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "RMC-AIDA-L_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_stack_cubic_block": { + "AgiBot-g1_tool_storage": { + "path": "AgiBot-g1_tool_storage", + "dataset_name": "tool_storage", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the mouse and power cord into the box.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tools", + "level1": "tools", + "level2": "tools", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-30600", + "dataset_size": "13.5GB", + "statistics": { + "total_episodes": 67, + "total_frames": 30600, + "total_tasks": 1, + "total_videos": 536, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4681c572-c9b2-475c-b9c1-66ed9f3d533d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the mouse and power cord into the box.", + "Grab and lift both the mouse and power cord from the accessory packaging area at the same time.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + 
"gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_tool_storage_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_tool_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n 
│ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_fold_jeans_shorts_children's": { "task_categories": [ "robotics" ], @@ -70463,7 +65730,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_stack_cubic_block", + "dataset_name": "Agilex_Cobot_Magic_fold_jeans_shorts_children's", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -70475,9 +65742,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "denim_shorts", + "level1": "clothing", + "level2": "denim_shorts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", "level3": null, "level4": null, "level5": null @@ -70485,89 +65768,72 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the building blocks with left and right hands respectively and place them on the blocks." + "Fold the children's denim shorts in half with two grippers, then fold them in half again, and use the left claw to place the folded children's denim shorts on the tray." 
], "sub_tasks": [ { - "subtask": "Grasp the blue build blocks with the right gripper", + "subtask": "Grasp the blue trousers with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Fold the blue trousers upwards with the right gripper", "subtask_index": 1 }, { - "subtask": "Place the blue build blocks on the red build block with the right gripper", + "subtask": "Fold the blue trousers from right to left with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the yellow build blocks on the orange build block with the right gripper", + "subtask": "Place the folded blue trousers on the green tray with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the red build blocks on the center of the table with the left gripper", + "subtask": "Grasp the blue trousers with the left gripper", "subtask_index": 4 }, { - "subtask": "Place the orange build blocks on the yellow build block with the right gripper", + "subtask": "Fold the blue trousers upwards with the left gripper", "subtask_index": 5 }, { - "subtask": "Grasp the blue build blocks with the left gripper", + "subtask": "Fold the blue trousers from left to right with the left gripper", "subtask_index": 6 }, { - "subtask": "Grasp the red build blocks with the left gripper", + "subtask": "end", "subtask_index": 7 }, - { - "subtask": "Abnormal", - "subtask_index": 8 - }, - { - "subtask": "Grasp the orange build blocks with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the blue build blocks on the red build block with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the yellow build blocks with the right gripper", - "subtask_index": 11 - }, { "subtask": "null", - "subtask_index": 12 + "subtask_index": 8 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower", + "fold" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": 
"two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -70578,23 +65844,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 162, - "total_frames": 48691, + "total_episodes": 50, + "total_frames": 60562, "fps": 30, - "total_tasks": 13, - "total_videos": 648, + "total_tasks": 9, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "1.92 GB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "912.51 MB" }, - "frame_num": 48691, - "dataset_size": "1.92 GB", - "data_structure": "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- 
subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (150 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 60562, + "dataset_size": "912.51 MB", + "data_structure": "Agilex_Cobot_Magic_fold_jeans_shorts_children_s_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:161" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -70666,33 +65932,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -70701,42 +65944,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -70745,36 +65978,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -70925,137 +66148,90 @@ 2 ], "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": 
"https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": 
"videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "G1edu-u3_bowl_storage_grape_singletry": { - "path": "G1edu-u3_bowl_storage_grape_singletry", - "dataset_name": "bowl_storage_grape_singletry", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the grapes with left hand", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } - ], - "operation_platform_height": 77.2, - "frame_range": "0-119619", - "dataset_size": "2.4GB", - "statistics": { - "total_episodes": 242, - "total_frames": 119619, - "total_tasks": 1, - "total_videos": 726, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": 
"40947b8c-339b-414f-94d9-6d5f24520362", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the grapes with left hand", - "Grasp the bread with the left gripper", - "Static", - "End", - "Place the bread the bowl with the left gripper", - "Place the grapes in the plate with left hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang 
Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_fold_towel_grey_tray": { + "Realman_RMC-AIDA-L_storage_towel_basket": { "task_categories": [ "robotics" ], @@ -71085,7 +66261,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_fold_towel_grey_tray", + "dataset_name": "Realman_RMC-AIDA-L_storage_towel_basket", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -71098,24 +66274,24 @@ "objects": [ { "object_name": "table", - "level1": "home_storage", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_tray", - "level1": "kitchen_supplies", - "level2": "green_tray", + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "grey_square_towel", + "object_name": "towel", "level1": "daily_necessities", - "level2": "grey_square_towel", + "level2": "towel", "level3": null, "level4": null, "level5": null @@ -71123,50 +66299,41 @@ ], "task_operation_type": 
"Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a gripper to place the folded towel on the tray." + "the left gripper grasp the basket on the table, the right grippe pick up the towel on the table and place it into the basket." ], "sub_tasks": [ { - "subtask": "Fold the grey towel from left to right with left gripper", + "subtask": "Place the towel into the basket with the right gripper", "subtask_index": 0 }, { - "subtask": "Place the folded grey towel on the tray with the right gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "PIck up the basket with the left gripper", "subtask_index": 2 }, { - "subtask": "Fold the grey towel from right to left with right gripper", + "subtask": "PIck up the towel with the right gripper", "subtask_index": 3 }, { - "subtask": "Fold the grey towel upwards", + "subtask": "end", "subtask_index": 4 }, - { - "subtask": "End", - "subtask_index": 5 - }, - { - "subtask": "Place the folded grey towel on the tray with the left gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "fold" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Realman_RMC-AIDA-L" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", @@ -71195,23 +66362,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 53308, + "total_episodes": 118, + "total_frames": 58520, "fps": 30, - "total_tasks": 8, - "total_videos": 150, + "total_tasks": 6, + "total_videos": 354, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, + "state_dim": 28, + 
"action_dim": 28, "camera_views": 3, - "dataset_size": "724.56 MB" + "dataset_size": "684.59 MB" }, - "frame_num": 53308, - "dataset_size": "724.56 MB", - "data_structure": "Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 58520, + "dataset_size": "684.59 MB", + "data_structure": "Realman_RMC-AIDA-L_storage_towel_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(106 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:117" }, "features": { "observation.images.cam_head_rgb": { @@ -71286,69 +66453,73 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 28 ], "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", + "right_arm_joint_7_rad", "right_gripper_open", "right_eef_pos_x_m", "right_eef_pos_y_m", "right_eef_pos_z_m", "right_eef_rot_euler_x_rad", "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ + "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_arm_joint_7_rad", "left_gripper_open", "left_eef_pos_x_m", "left_eef_pos_y_m", "left_eef_pos_z_m", "left_eef_rot_euler_x_rad", "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", + "left_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 28 + ], + "names": [ "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", + "right_arm_joint_7_rad", "right_gripper_open", "right_eef_pos_x_m", "right_eef_pos_y_m", 
"right_eef_pos_z_m", "right_eef_rot_euler_x_rad", "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -71388,17 +66559,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -71415,10 +66586,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -71435,130 +66606,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + 
"names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, "gripper_mode_state": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_mode_action": { "names": [ "left_gripper_mode", "right_gripper_mode" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_state": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "gripper_activity_action": { "names": [ "left_gripper_activity", "right_gripper_activity" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -71582,9 +66753,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "RMC-AIDA-L_fruit_storage": { - "path": "RMC-AIDA-L_fruit_storage", - "dataset_name": "fruit_storage", + "Cobot_Magic_cap_the_pen_a": { + "path": "Cobot_Magic_cap_the_pen_a", + "dataset_name": "cap_the_pen_a", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -71593,9 +66764,10 @@ "atomic_actions": [ "grasp", "pick", - "place" + "place", + "insert" ], - "tasks": "End", + "tasks": "Secure the pen cap with the right hand.", "objects": [ { "object_name": "table", @@ -71606,51 +66778,169 @@ "level5": null }, { - "object_name": "grape", - "level1": "fruit", - "level2": "grape", 
+ "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", + "object_name": "pen_cap", + "level1": "office_supplies", + "level2": "pen_cap", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-51317", + "dataset_size": "1.0GB", + "statistics": { + "total_episodes": 55, + "total_frames": 51317, + "total_tasks": 1, + "total_videos": 165, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 50 + }, + "dataset_uuid": "b549e79c-e49e-4cec-a476-b94bdd766f22", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Secure the pen cap with the right hand.", + "Insert the pen into the cap", + "End", + "Put the cap on the pen", + "Pick up the pen cap", + "Grab the pen barrel with the left hand.", + "Secure the pen cap with the left hand.", + "Grab the pen cap with the left hand.", + "Place the pen on the table", + "Abnormal", + "Grab the pen cap with the right hand.", + "Insert the pen cap with the right hand.", + "Grab the pen barrel with the right hand.", + "Pick up the pen", + "Place the pen down with the right hand.", + "Insert the pen cap with the left hand.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + 
{ + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_cap_the_pen_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_cap_the_pen_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)" + }, + "Split_aloha_wipe_table": { + "path": "Split_aloha_wipe_table", + "dataset_name": "wipe_table", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick", + "wipe" + ], + "tasks": "Grasp the rag with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", + "object_name": "cup", "level1": "container", - "level2": "bowl", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rag", + "level1": "clothing", + "level2": "rag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "stain", + "level1": "garbage", + "level2": "stain", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-291785", - "dataset_size": "2.6GB", + "frame_range": "0-106804", + "dataset_size": "1.3GB", "statistics": { - "total_episodes": 521, - "total_frames": 291785, - "total_tasks": 2, - "total_videos": 1563, + "total_episodes": 200, + "total_frames": 106804, + "total_tasks": 1, + "total_videos": 600, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "8276223a-988a-4a97-8459-97f7c498cbb5", + "dataset_uuid": "18848f29-f6ff-4f8a-ab4d-acc67d812c64", "language": [ "en", "zh" @@ -71659,21 +66949,14 @@ "robotics" ], "sub_tasks": [ + "Grasp the rag with right gripper", + "Abnormal", + "Place the rag on the table with right gripper", + "Stand the paper cup upright with left gripper", "End", - "Place the grape in the purple plate with left gripper", - "Grasp the grape with right gripper", - "Place the banana in the purple plate with right gripper", - "Place the banana in the blue bowl with left gripper", - "Grasp the banana with right gripper", - "Grasp the grape with left gripper", - "Place the bowl on the table 
with right gripper", - "Place the grape in the purple plate with right gripper", - "Place the banana in the purple plate with left gripper", - "Lift the blue bowl with right gripper", - "Grasp the blue bowl with right gripper", "Static", - "Abnormal", - "Grasp the banana with left gripper", + "Grasp the paper cup with left gripper", + "Wipe the stains off the table with the rag with right gripper", "null" ], "annotations": { @@ -71711,22 +66994,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_fruit_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_fruit_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Split_aloha_wipe_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_wipe_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "G1edu-u3_pick_up_the_bottled_water_as": { - "path": "G1edu-u3_pick_up_the_bottled_water_as", - "dataset_name": "pick_up_the_bottled_water_as", + "R1_Lite_pour_water": { + "path": "R1_Lite_pour_water", + "dataset_name": "pour_water", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick" + "pick", + "place" ], - "tasks": "Grasp the water bottle and lift it to the center of the view with right gripper", + "tasks": "Pour water into the cup", "objects": [ { "object_name": "table", @@ -71737,35 +67021,43 @@ "level5": null }, { - "object_name": "bottle", - "level1": "container", - "level2": "bottle", + "object_name": "water", + "level1": "drink", + "level2": "water", "level3": null, "level4": null, "level5": null }, { - "object_name": "water", - "level1": "beverages", - "level2": "water", + "object_name": "kettle", + "level1": "electric_appliance", + "level2": "kettle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-9753", - "dataset_size": "116.6MB", + "frame_range": "0-53125", + "dataset_size": "2.0GB", "statistics": { - "total_episodes": 24, - "total_frames": 9753, + "total_episodes": 51, + "total_frames": 53125, "total_tasks": 1, - "total_videos": 24, + "total_videos": 153, "total_chunks": 1, - "chunks_size": 24, + "chunks_size": 
1000, "fps": 30 }, - "dataset_uuid": "a5f8649a-a1ab-415a-9fd4-b48b04fab77e", + "dataset_uuid": "8b05bffb-745b-4de7-bbfa-113e42018671", "language": [ "en", "zh" @@ -71774,10 +67066,12 @@ "robotics" ], "sub_tasks": [ - "Grasp the water bottle and lift it to the center of the view with right gripper", - "Grasp the water bottle and lift it to the center of the view with left gripper", - "End", - "Abnormal", + "Pour water into the cup", + "Put the cup down on the table", + "Put down the kettle on the table", + "Pour water into another cup", + "Pick up the cup", + "Pick up the kettle", "null" ], "annotations": { @@ -71815,22 +67109,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": 
"G1edu-u3_pick_up_the_bottled_water_as_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_up_the_bottled_water_as_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_pour_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_pour_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "G1edu-u3_pick_up_the_tissue_box_ao": { - "path": "G1edu-u3_pick_up_the_tissue_box_ao", - "dataset_name": 
"pick_up_the_tissue_box_ao", + "Galbot_g1_steamer_storage_baozi_j": { + "path": "Galbot_g1_steamer_storage_baozi_j", + "dataset_name": "steamer_storage_baozi_j", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick" + "pick", + "place" ], - "tasks": "Grasp the tissue box and lift it to the center of the view with both gripper", + "tasks": "Place the pot lid on the table with left gripper", "objects": [ { "object_name": "table", @@ -71841,35 +67136,51 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", + "object_name": "baozi", + "level1": "food", + "level2": "baozi", "level3": null, "level4": null, "level5": null }, { - "object_name": "tissue", + "object_name": "steamer", + "level1": "cookware", + "level2": "steamer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pot_lid", "level1": "daily_necessities", - "level2": "tissue", + "level2": "pot_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-21166", - "dataset_size": "273.8MB", + "frame_range": "0-56015", + "dataset_size": "1.1GB", "statistics": { - "total_episodes": 29, - "total_frames": 21166, + "total_episodes": 41, + "total_frames": 56015, "total_tasks": 1, - "total_videos": 29, + "total_videos": 123, "total_chunks": 1, - "chunks_size": 29, + "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "3adf4438-1317-4ffb-9447-b4dd516c8559", + "dataset_uuid": "b53dc50b-2b01-4783-900a-e0a8fa876a20", "language": [ "en", "zh" @@ -71878,8 +67189,16 @@ "robotics" ], "sub_tasks": [ - "Grasp the tissue box and lift it to the center of the view with both gripper", + "Place the pot lid on the table with left gripper", + "Grasp the pot lid with left gripper", + 
"Place the pot lid on the steamer with left gripper", "End", + "Move the switch to the right with left gripper", + "Place the baozi on the steamer with right gripper", + "Grasp the baozi with right gripper", + "Place the baozi on the plate with right gripper", + "Move the switch to the left with right gripper", + "Grasp the baozi in the plate with right gripper", "null" ], "annotations": { @@ -71917,10 +67236,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pick_up_the_tissue_box_ao_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_up_the_tissue_box_ao_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Galbot_g1_steamer_storage_baozi_j_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_steamer_storage_baozi_j_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_move_cake_tape_measure": { + "Agilex_Cobot_Magic_move_pencil_sharpener": { "task_categories": [ "robotics" ], @@ -71950,11 +67269,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_cake_tape_measure", + "dataset_name": "Agilex_Cobot_Magic_move_pencil_sharpener", "dataset_uuid": 
"00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "other", - "level2": "courier_station", + "level1": "education", + "level2": "school", "level3": null, "level4": null, "level5": null @@ -71962,25 +67281,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "paper_boxes", - "level1": "home_storage", - "level2": "paper_boxes", + "object_name": "deli_stapler", + "level1": "stationery", + "level2": "deli_stapler", "level3": null, "level4": null, "level5": null }, { - "object_name": "tape_measure", + "object_name": "deli_water-based_marker", "level1": "stationery", - "level2": "tape_measure", + "level2": "deli_water-based_marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "deli_watercolor_marker_box", + "level1": "stationery", + "level2": "deli_watercolor_marker_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_incenser", + "level1": "daily_necessities", + "level2": "purple_incense", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pencil_sharpener", + "level1": "stationery", + "level2": "pencil_sharpener", "level3": null, "level4": null, "level5": null @@ -71988,27 +67331,27 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the cake and tape measure with both hands and place them on the cardboard box." + "Place the pencil sharpener on the left side of the stapler." 
], "sub_tasks": [ { - "subtask": "Grasp the tape measure with the right gripper", + "subtask": "Place the pencil sharpener to the left of the stapler with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the bagged waffle on the carton with the left gripper", + "subtask": "Grasp the pencil sharpene with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the bagged waffle with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Place the tape measure on the carton with the right gripper", + "subtask": "Grasp the pencil sharpene with the left gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the pencil sharpener to the left of the stapler with the right gripper", "subtask_index": 4 }, { @@ -72018,31 +67361,29 @@ ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": 
"radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -72053,23 +67394,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 9381, + "total_episodes": 98, + "total_frames": 41602, "fps": 30, "total_tasks": 6, - "total_videos": 196, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "362.05 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "374.66 MB" }, - "frame_num": 9381, - "dataset_size": "362.05 MB", - "data_structure": "Airbot_MMK2_move_cake_tape_measure_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 41602, + "dataset_size": "374.66 MB", + "data_structure": "Agilex_Cobot_Magic_move_pencil_sharpener_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:48" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -72141,33 +67482,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -72176,42 +67494,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -72220,36 +67528,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -72289,17 +67587,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -72316,10 +67614,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -72336,70 +67634,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -72421,9 +67779,9 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_take_dog_doll": { + "Airbot_MMK2_dial_number": { "task_categories": [ "robotics" ], @@ -72453,11 +67811,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_dog_doll", + "dataset_name": "Airbot_MMK2_dial_number", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "bedroom", "level3": null, "level4": null, "level5": null @@ -72465,25 +67823,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "toy_dog", - "level1": "doll", - "level2": "toy_dog", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lid", - "level1": "storage_utensils", - "level2": "lid", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "mobile_phone", + "level1": "appliances", + "level2": "mobile_phone", "level3": null, "level4": null, "level5": null @@ -72491,30 +67833,39 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the toy dog from the white lid by hand and place it on the table." + "pick up the phone button and then put it down." 
], "sub_tasks": [ { - "subtask": "Place the puppy on the table with the left and right grippers", + "subtask": "Dial the number with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the puppy with the left and right grippers", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Lift the phone with the right gripper", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Grasp the phone with the right gripper", "subtask_index": 3 + }, + { + "subtask": "Place the phone on the table with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "press" ], "robot_name": [ "Airbot_MMK2" @@ -72548,23 +67899,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 6425, + "total_episodes": 98, + "total_frames": 41296, "fps": 30, - "total_tasks": 4, - "total_videos": 200, + "total_tasks": 6, + "total_videos": 392, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "253.22 MB" + "dataset_size": "1.39 GB" }, - "frame_num": 6425, - "dataset_size": "253.22 MB", - "data_structure": "Airbot_MMK2_take_dog_doll_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 41296, + "dataset_size": "1.39 GB", + "data_structure": "Airbot_MMK2_dial_number_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -72918,7 +68269,110 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_storage_object_brown_basket": { + "AgiBot-g1_picks_up_battery_b": { + "path": "AgiBot-g1_picks_up_battery_b", + "dataset_name": "picks_up_battery_b", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp" + ], + "tasks": "Place the power supply on the operating table.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "carton", + "level1": "tool", + "level2": "carton", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "battery", + "level1": "tool", + "level2": "battery", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-131840", + "dataset_size": "71.4GB", + "statistics": { + "total_episodes": 329, + "total_frames": 131840, + "total_tasks": 1, + "total_videos": 2632, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "69e28097-0f6e-4238-8562-b87280af0714", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the power supply on the operating table.", + "Grab and lift the power supply from the large box.", + "null" + ], + "annotations": { + 
"subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint 
arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_picks_up_battery_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_picks_up_battery_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_pick_up_and_place_tub": { "task_categories": [ "robotics" ], @@ -72948,11 +68402,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_brown_basket", + "dataset_name": "Airbot_MMK2_pick_up_and_place_tub", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "bathroom", "level3": null, "level4": null, "level5": null @@ -72960,217 +68414,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "brown_basket", - "level1": "basket", - "level2": "brown_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruits", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_chemical_products", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_pot", - "level1": "cookware", - "level2": "blue_pot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_towel", - 
"level1": "towels", - "level2": "brown_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coke(slim_can)", - "level1": "beverages", - "level2": "coke(slim_can)", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "compass", - "level1": "rulers", - "level2": "compass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block_pillar", - "level1": "building_blocks", - "level2": "block_pillar", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "spoons_and_spatulas", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "erasers", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", - "level3": null, - "level4": null, - 
"level5": null - }, - { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_building_blocks", - "level1": "building_blocks", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "tape", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "doll", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", + "object_name": "washbasin", + "level1": "home_storage", + "level2": "washbasin", "level3": null, "level4": null, "level5": null @@ -73178,360 +68424,44 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick the target object and place on the brown basket." + "pick up the washbasin and put it down." 
], "sub_tasks": [ { - "subtask": "Grasp the blue pot with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Grasp the plugboard with the left gripper", + "subtask": "Lift the basin with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the potato chips into the basket with the left gripper", + "subtask": "Grasp the basin with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the banana into the basket with the left gripper", + "subtask": "Grasp the basin with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the coke into the basket with the left gripper", + "subtask": "Place basin on the table with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the brown towel into the basket with the right gripper", + "subtask": "Place basin on the table with the left gripper", "subtask_index": 5 }, { - "subtask": "Place the peach slice into the basket with the right gripper", + "subtask": "Abnormal", "subtask_index": 6 }, { - "subtask": "Grasp the banana with the left gripper", + "subtask": "Lift the basin with the right gripper", "subtask_index": 7 }, - { - "subtask": "Place the round wooden block into the basket with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Place the duck toy into the basket with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Grasp the compasses with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Place the blackboard erasure into the basket with the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the plugboard into the basket with the left gripper", - "subtask_index": 12 - }, - { - "subtask": "Place the square chewing gum into the basket with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the cookie with the right gripper", - "subtask_index": 15 - }, - { - "subtask": "Grasp the blue cup with 
the left gripper", - "subtask_index": 16 - }, - { - "subtask": "Place the blackboard erasure into the basket with the right gripper", - "subtask_index": 17 - }, - { - "subtask": "Place the hard facial cleanser into the basket with the left gripper", - "subtask_index": 18 - }, - { - "subtask": "Place the round bread into the basket with the left gripper", - "subtask_index": 19 - }, - { - "subtask": "Place the square wooden block into the basket with the right gripper", - "subtask_index": 20 - }, - { - "subtask": "Grasp the soft facial cleanser with the left gripper", - "subtask_index": 21 - }, - { - "subtask": "Place the blue pot into the basket with the left gripper", - "subtask_index": 22 - }, - { - "subtask": "Grasp the peach slice with the right gripper", - "subtask_index": 23 - }, - { - "subtask": "Place the soft facial cleanser into the basket with the left gripper", - "subtask_index": 24 - }, - { - "subtask": "Grasp the square chewing gum with the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Place the shower sphere into the basket with the left gripper", - "subtask_index": 26 - }, - { - "subtask": "Place the yogurt into the basket with the right gripper", - "subtask_index": 27 - }, - { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 28 - }, - { - "subtask": "Place the round wooden block into the basket with the left gripper", - "subtask_index": 29 - }, - { - "subtask": "Place the back scratcher into the basket with the left gripper", - "subtask_index": 30 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 31 - }, - { - "subtask": "Grasp the yogurt with the right gripper", - "subtask_index": 32 - }, - { - "subtask": "Grasp the tin with the left gripper", - "subtask_index": 33 - }, - { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 34 - }, - { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 35 - }, - { 
- "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 36 - }, - { - "subtask": "Grasp the back scratcher with the left gripper", - "subtask_index": 37 - }, - { - "subtask": "Place the hard facial cleanser into the basket with the right gripper", - "subtask_index": 38 - }, - { - "subtask": "Grasp the hard facial cleanser with the right gripper", - "subtask_index": 39 - }, - { - "subtask": "Place the green lemon into the basket with the right gripper", - "subtask_index": 40 - }, - { - "subtask": "Place the banana into the basket with the right gripper", - "subtask_index": 41 - }, - { - "subtask": "Place the compasses into the basket with the right gripper", - "subtask_index": 42 - }, - { - "subtask": "Place the compasses into the basket with the left gripper", - "subtask_index": 43 - }, - { - "subtask": "Grasp the green lemon with the right gripper", - "subtask_index": 44 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 45 - }, - { - "subtask": "Place the duck toy into the basket with the left gripper", - "subtask_index": 46 - }, - { - "subtask": "Place the chocolate cake into the basket with the right gripper", - "subtask_index": 47 - }, - { - "subtask": "Place the shower sphere into the basket with the right gripper", - "subtask_index": 48 - }, - { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 49 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 50 - }, - { - "subtask": "End", - "subtask_index": 51 - }, - { - "subtask": "Place the cookie into the basket with the right gripper", - "subtask_index": 52 - }, - { - "subtask": "Place the tape into the basket with the right gripper", - "subtask_index": 53 - }, - { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 54 - }, - { - "subtask": "Place the bread slice into the basket with the left gripper", - "subtask_index": 55 - }, - { - "subtask": "Grasp the 
coke with the left gripper", - "subtask_index": 56 - }, - { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 57 - }, - { - "subtask": "Place the tape into the basket with the left gripper", - "subtask_index": 58 - }, - { - "subtask": "Place the blue cup into the basket with the left gripper", - "subtask_index": 59 - }, - { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 60 - }, - { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 61 - }, - { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 62 - }, - { - "subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 63 - }, - { - "subtask": "Place the blue pot into the basket with the right gripper", - "subtask_index": 64 - }, - { - "subtask": "Grasp the tin with the right gripper", - "subtask_index": 65 - }, - { - "subtask": "Place the tin into the basket with the left gripper", - "subtask_index": 66 - }, - { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 67 - }, - { - "subtask": "Place the square chewing gum into the basket with the left gripper", - "subtask_index": 68 - }, - { - "subtask": "Place the bread slice into the basket with the right gripper", - "subtask_index": 69 - }, - { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 70 - }, - { - "subtask": "Place the blue cup into the basket with the right gripper", - "subtask_index": 71 - }, - { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 72 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 73 - }, - { - "subtask": "Place the chocolate cake into the basket with the left gripper", - "subtask_index": 74 - }, - { - "subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 75 - }, - { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 76 - }, 
- { - "subtask": "Place the round bread into the basket with the right gripper", - "subtask_index": 77 - }, - { - "subtask": "Grasp the round bread with the left gripper", - "subtask_index": 78 - }, - { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 79 - }, - { - "subtask": "Place the brown towel into the basket with the left gripper", - "subtask_index": 80 - }, - { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 81 - }, - { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 82 - }, - { - "subtask": "Place the square wooden block into the basket with the left gripper", - "subtask_index": 83 - }, - { - "subtask": "Place the tin into the basket with the right gripper", - "subtask_index": 84 - }, - { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 85 - }, - { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 86 - }, { "subtask": "null", - "subtask_index": 87 + "subtask_index": 8 } ], "atomic_actions": [ @@ -73540,21 +68470,21 @@ "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + 
"cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -73571,30 +68501,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 102, - "total_frames": 18209, + "total_episodes": 98, + "total_frames": 16632, "fps": 30, - "total_tasks": 88, - "total_videos": 408, + "total_tasks": 9, + "total_videos": 392, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "899.78 MB" + "dataset_size": "607.45 MB" }, - "frame_num": 18209, - "dataset_size": "899.78 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_brown_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 16632, + "dataset_size": "607.45 MB", + "data_structure": "Airbot_MMK2_pick_up_and_place_tub_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:101" + "train": "0:97" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -73603,8 +68533,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -73613,11 +68543,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -73626,8 +68556,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -73636,11 +68566,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -73649,8 +68579,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -73659,11 +68589,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -73672,8 +68602,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", 
"video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -73685,7 +68615,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -73700,14 +68630,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -73722,8 +68674,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -73874,66 +68848,6 @@ 2 ], "dtype": "int32" - }, - 
"gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -73955,9 +68869,108 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_fold_towel_pink_tray": { + "AIRBOT_MMK2_close_the_computer": { + "path": "AIRBOT_MMK2_close_the_computer", + "dataset_name": "close_the_computer", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "computer", + "level1": "office_supplies", + "level2": "computer", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-7754", + "dataset_size": 
"320.9MB", + "statistics": { + "total_episodes": 49, + "total_frames": 7754, + "total_tasks": 1, + "total_videos": 196, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "5e9fe536-85e9-4e68-b390-51ba7aec5e6b", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Press the laptop with the left gripper", + "Release the laptop with the left gripper", + "Static", + "Abnormal", + "Close the laptop with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei 
Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_close_the_computer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── 
(...)", + "structure": "AIRBOT_MMK2_close_the_computer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_connect_block": { "task_categories": [ "robotics" ], @@ -73987,11 +69000,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_fold_towel_pink_tray", + "dataset_name": "Agilex_Cobot_Magic_connect_block", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -74007,17 +69020,9 @@ "level5": null }, { - "object_name": "green_tray", - "level1": "kitchen_supplies", - "level2": "green_tray", - "level3": null, - "level4": null, - "level5": null - 
}, - { - "object_name": "pink_square_towel", - "level1": "daily_necessities", - "level2": "pink_square_towel", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null @@ -74025,34 +69030,9700 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a gripper to place the folded towel on the tray." + "According to the building block template in front, assemble the scattered blocks together." ], "sub_tasks": [ { - "subtask": "Fold the pink towel upwards", + "subtask": "Put the yellow build block on the right of the green build block\n", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Put the blue build block on the behind of the green build block\n", "subtask_index": 1 }, { - "subtask": "Place the folded pink towel on the tray with the left gripper", + "subtask": "Put the yellow build block in the center of the table", "subtask_index": 2 }, { - "subtask": "Fold the pink towel from right to left with right gripper", + "subtask": "Put the blue build block on the right of the yellow build block\n", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "Put the green build block on the red build block\n", "subtask_index": 4 - } - ], - "atomic_actions": [ - "grasp", - "lift", - "lower", + }, + { + "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask_index": 5 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 6 + }, + { + "subtask": "Put the orange build block on the right of the red build block\n", + "subtask_index": 7 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 8 + }, + { + 
"subtask": "Put the green build block on the left of the blue build block\n", + "subtask_index": 9 + }, + { + "subtask": "Put the blue build block on the left of the yellow build block\n", + "subtask_index": 10 + }, + { + "subtask": "Put the yellow build block on the right of the green build block\n", + "subtask_index": 11 + }, + { + "subtask": "Put the orange build block on the front of the blue build block\n", + "subtask_index": 12 + }, + { + "subtask": "Put the orange build block on the left of the green build block\n", + "subtask_index": 13 + }, + { + "subtask": "Put the blue build block on the right of the green build block\n", + "subtask_index": 14 + }, + { + "subtask": "Put the green build block on the behind of the blue build block\n", + "subtask_index": 15 + }, + { + "subtask": "Put the orange build block on the right of the build block\n", + "subtask_index": 16 + }, + { + "subtask": "Put the orange build block on the right of the green build block\n", + "subtask_index": 17 + }, + { + "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask_index": 18 + }, + { + "subtask": "Put the red build block on the front of the yellow build block\n", + "subtask_index": 19 + }, + { + "subtask": "Put the orange build block on the front of the yellow build block\n", + "subtask_index": 20 + }, + { + "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask_index": 21 + }, + { + "subtask": "Put the orange build block in the center of the table\n", + "subtask_index": 22 + }, + { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 23 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 24 + }, + { + "subtask": "Put the orange build block on the left of the blue build block\n", + "subtask_index": 25 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 26 + }, 
+ { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 27 + }, + { + "subtask": "Put the red build block on the behind of the blue build block\n", + "subtask_index": 28 + }, + { + "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask_index": 29 + }, + { + "subtask": "Put the orange build block on the behind of the green build block\n", + "subtask_index": 30 + }, + { + "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask_index": 31 + }, + { + "subtask": "Put the yellow build block on the front of the orange build block\n", + "subtask_index": 32 + }, + { + "subtask": "Put the blue build block on the right of the green build block\n", + "subtask_index": 33 + }, + { + "subtask": "Put the blue build block on the right of the red build block\n", + "subtask_index": 34 + }, + { + "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask_index": 35 + }, + { + "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask_index": 36 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 37 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 38 + }, + { + "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask_index": 39 + }, + { + "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask_index": 40 + }, + { + "subtask": "Put theyellow build block on the behind of the blue build block\n", + "subtask_index": 41 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 42 + }, + { + "subtask": "Put the red build block on the right of the orange build block\n", + "subtask_index": 43 + }, + { + "subtask": "Put the yellow build block on the right of the red build block\n", + 
"subtask_index": 44 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 45 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 46 + }, + { + "subtask": "Put the orange build block in the center of the table\n", + "subtask_index": 47 + }, + { + "subtask": "Put the green build block on the orange build block\n", + "subtask_index": 48 + }, + { + "subtask": "Put the yellow build block on the right of the blue build block\n", + "subtask_index": 49 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 50 + }, + { + "subtask": "Put the green build block on the blue build block\n", + "subtask_index": 51 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 52 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 53 + }, + { + "subtask": "Put the orange build block on the behind of the green build block\n", + "subtask_index": 54 + }, + { + "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask_index": 55 + }, + { + "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask_index": 56 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 57 + }, + { + "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask_index": 58 + }, + { + "subtask": "Put the orange build block in the center of the table\n", + "subtask_index": 59 + }, + { + "subtask": "Put the blue build block on the right of the yellow build block\n", + "subtask_index": 60 + }, + { + "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask_index": 61 + }, + { + "subtask": "Put the yellow build block on the behind of the green build block\n", + 
"subtask_index": 62 + }, + { + "subtask": "Put the blue build block on the front of the orange build block\n", + "subtask_index": 63 + }, + { + "subtask": "Put the green build block on the behind of the red build block\n", + "subtask_index": 64 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 65 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 66 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 67 + }, + { + "subtask": "Put the red build block on the behind of the green build block\n", + "subtask_index": 68 + }, + { + "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask_index": 69 + }, + { + "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask_index": 70 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 71 + }, + { + "subtask": "Put the red build block on the behind of the yellow build block\n", + "subtask_index": 72 + }, + { + "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask_index": 73 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 74 + }, + { + "subtask": "Put the red build block on the blue build block\n", + "subtask_index": 75 + }, + { + "subtask": "Put the orange build block on the left of the yellow build block\n", + "subtask_index": 76 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 77 + }, + { + "subtask": "Put the red build block on the left of the orange build block\n", + "subtask_index": 78 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 79 + }, + { + "subtask": "Put the red build block on the right of the orange build block\n", + 
"subtask_index": 80 + }, + { + "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask_index": 81 + }, + { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 82 + }, + { + "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask_index": 83 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask_index": 84 + }, + { + "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask_index": 85 + }, + { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 86 + }, + { + "subtask": "Put the green build block on the right of the blue build block\n", + "subtask_index": 87 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 88 + }, + { + "subtask": "Put the blue build block on the right of the yellow build block\n", + "subtask_index": 89 + }, + { + "subtask": "Put the red build block on the right of the green build block\n", + "subtask_index": 90 + }, + { + "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask_index": 91 + }, + { + "subtask": "Put the yellow build block on the blue build block\n", + "subtask_index": 92 + }, + { + "subtask": "Put the red build block on the right of the orange build block\n", + "subtask_index": 93 + }, + { + "subtask": "Put the bluebuild block on the right of the yellow build block\n", + "subtask_index": 94 + }, + { + "subtask": "Put the red build block in the center of the table\n", + "subtask_index": 95 + }, + { + "subtask": "Put the red build block on the blue build block\n", + "subtask_index": 96 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 97 + }, + { + "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask_index": 98 + 
}, + { + "subtask": "Put the yellow build block on the right of the red build block\n", + "subtask_index": 99 + }, + { + "subtask": "\nPut the yellow build block on the behind of the green build block\n", + "subtask_index": 100 + }, + { + "subtask": "Put the yellow build block on the right of the orange build block\n", + "subtask_index": 101 + }, + { + "subtask": "Put the orange build block on the behind of the blue build block\n", + "subtask_index": 102 + }, + { + "subtask": "Put the orange build block on the right of the red build block\n", + "subtask_index": 103 + }, + { + "subtask": "Put the green build block on the behind of the orange build block\n", + "subtask_index": 104 + }, + { + "subtask": "Put the blue build block on the behind of the yellow build block\n", + "subtask_index": 105 + }, + { + "subtask": "Put the yellow build block on the right of the orange build block\n", + "subtask_index": 106 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 107 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 108 + }, + { + "subtask": "Put the red build block on the front of the blue build block\n", + "subtask_index": 109 + }, + { + "subtask": "Put the red build block on the behind of the green' build block\n", + "subtask_index": 110 + }, + { + "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask_index": 111 + }, + { + "subtask": "Put the red build block on the left of the green build block\n", + "subtask_index": 112 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 113 + }, + { + "subtask": "Put blue the build block on the right of the red build block\n", + "subtask_index": 114 + }, + { + "subtask": "Put the green build block on the orange build block\n", + "subtask_index": 115 + }, + { + "subtask": "Put the orange build block on the right of the red 
build block\n", + "subtask_index": 116 + }, + { + "subtask": "Put the blue build block on the right of the green build block\n", + "subtask_index": 117 + }, + { + "subtask": "Put the yellow build block on the right of the blue build block\n", + "subtask_index": 118 + }, + { + "subtask": "Put the orange build block on the behind of the blue build block\n", + "subtask_index": 119 + }, + { + "subtask": "\nPut the yellow build block in the center of the table", + "subtask_index": 120 + }, + { + "subtask": "Put the yellow build block on the right of the green build block\n", + "subtask_index": 121 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 122 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 123 + }, + { + "subtask": "Put the orange build block in the center of the table\n", + "subtask_index": 124 + }, + { + "subtask": "Put the yellow build block on the right of the green build block\n", + "subtask_index": 125 + }, + { + "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask_index": 126 + }, + { + "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask_index": 127 + }, + { + "subtask": "Put the red build block on the behind of the green build block\n", + "subtask_index": 128 + }, + { + "subtask": "Put the green build block on the behind of the blue build block", + "subtask_index": 129 + }, + { + "subtask": "Put the green build block on the behind of the orange build block\n", + "subtask_index": 130 + }, + { + "subtask": "Put the blue build block on the right of the green build block\n", + "subtask_index": 131 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 132 + }, + { + "subtask": "Put the green build block on the behind of the blue build block", + "subtask_index": 133 + }, + { + "subtask": "Put the green build block on the left of 
the orange build block\n", + "subtask_index": 134 + }, + { + "subtask": "Put the red build block on the behind of the yellow build block\n", + "subtask_index": 135 + }, + { + "subtask": "Put the yellow build block on the right of the green build block\n", + "subtask_index": 136 + }, + { + "subtask": "Put the blue build block on the behind of the red build block\n", + "subtask_index": 137 + }, + { + "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask_index": 138 + }, + { + "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask_index": 139 + }, + { + "subtask": "Put the yellow build block on the blue build block\n", + "subtask_index": 140 + }, + { + "subtask": "Put the red build block on the front of the green build block\n", + "subtask_index": 141 + }, + { + "subtask": "Put the red build block on the left of the ornage build block\n", + "subtask_index": 142 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 143 + }, + { + "subtask": "Put the red build block on the right of the green build block\n", + "subtask_index": 144 + }, + { + "subtask": "Put the yellow build block on the right of the orange build block\n", + "subtask_index": 145 + }, + { + "subtask": "Put the yellow build block on the right of the blue build block\n", + "subtask_index": 146 + }, + { + "subtask": "Put the XX build block on the XX build block", + "subtask_index": 147 + }, + { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 148 + }, + { + "subtask": "Put the orange build block on the green build block\n", + "subtask_index": 149 + }, + { + "subtask": "End", + "subtask_index": 150 + }, + { + "subtask": "Put the blue build block on the right of the red build block\n", + "subtask_index": 151 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 152 + }, + { + "subtask": "Put the 
greem build block on the behind of the blue build block\n", + "subtask_index": 153 + }, + { + "subtask": "Put the left build block on the left of the green build block\n", + "subtask_index": 154 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 155 + }, + { + "subtask": "Put the red build block on the left of the blue build block\n", + "subtask_index": 156 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 157 + }, + { + "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask_index": 158 + }, + { + "subtask": "Put the green build block on the behind of the blue build block\n", + "subtask_index": 159 + }, + { + "subtask": "Put the yellow build block on the right of the orange build block\n", + "subtask_index": 160 + }, + { + "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask_index": 161 + }, + { + "subtask": "Put the red build block on the front of the green build block\n", + "subtask_index": 162 + }, + { + "subtask": "Put the red build block on the front of the yellow build block\n", + "subtask_index": 163 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask_index": 164 + }, + { + "subtask": "Put the blue build block on the orange build block\n", + "subtask_index": 165 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 166 + }, + { + "subtask": "Put the orange build block on the front of the blue build block\n", + "subtask_index": 167 + }, + { + "subtask": "Put the blue build block on the right of the yellow build block\n", + "subtask_index": 168 + }, + { + "subtask": "Put the blue build block on the left of the yellow build block\n", + "subtask_index": 169 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 170 + 
}, + { + "subtask": "Put the orange build block on the left of the red build block\n", + "subtask_index": 171 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 172 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 173 + }, + { + "subtask": "Put the blue build block on the right of the yellow build block", + "subtask_index": 174 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 175 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 176 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 177 + }, + { + "subtask": "Put the green build block on the right of the blue build block\n", + "subtask_index": 178 + }, + { + "subtask": "Put the yellow build block in the center of the table\n", + "subtask_index": 179 + }, + { + "subtask": "Abnormal", + "subtask_index": 180 + }, + { + "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask_index": 181 + }, + { + "subtask": "Put the red build block on the yellow build block\n", + "subtask_index": 182 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 183 + }, + { + "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask_index": 184 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 185 + }, + { + "subtask": "Put the orange build block on the orange build block\n", + "subtask_index": 186 + }, + { + "subtask": "Put the blue build block on the behind of the red build block\n", + "subtask_index": 187 + }, + { + "subtask": "Put the yellow build block on the right of the blue build block\n", + "subtask_index": 188 + }, + { + "subtask": "Put the yellow build block on the behind of the 
green build block\n", + "subtask_index": 189 + }, + { + "subtask": "Put the yellow build block on the left of the blue build block\n", + "subtask_index": 190 + }, + { + "subtask": "Put the yellow build block on the right of the orange build block\n", + "subtask_index": 191 + }, + { + "subtask": "Put the yellow build block on the blue build block\n", + "subtask_index": 192 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask_index": 193 + }, + { + "subtask": "Put the green build block on the right of the blue build block\n", + "subtask_index": 194 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 195 + }, + { + "subtask": "Put the red build block on the behind of the green build block\n", + "subtask_index": 196 + }, + { + "subtask": "Put the red build block in the center of the table\n", + "subtask_index": 197 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask_index": 198 + }, + { + "subtask": "Put the green build block on the behind of the orange build block\n", + "subtask_index": 199 + }, + { + "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask_index": 200 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 201 + }, + { + "subtask": "Put the green build block on the red build block\n", + "subtask_index": 202 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 203 + }, + { + "subtask": "Put the red build block on the behind of the orange build block\n", + "subtask_index": 204 + }, + { + "subtask": "Put the red build block in the center of the table\n", + "subtask_index": 205 + }, + { + "subtask": "Put the red build block in the center of the table\n", + "subtask_index": 206 + }, + { + "subtask": "Put the green build block on the right of the orange 
build block\n", + "subtask_index": 207 + }, + { + "subtask": "Put the green build block on the front of the blue build block\n", + "subtask_index": 208 + }, + { + "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask_index": 209 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block\n", + "subtask_index": 210 + }, + { + "subtask": "Put the green build block on the blue build block\n", + "subtask_index": 211 + }, + { + "subtask": "Put the blue build block on the right of the yellow build block", + "subtask_index": 212 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 213 + }, + { + "subtask": "Put the orange build block on the left of the red build block\n", + "subtask_index": 214 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 215 + }, + { + "subtask": "Put the blue build block on the front of the green build block\n", + "subtask_index": 216 + }, + { + "subtask": "Put the yellow build block on the orange build block\n", + "subtask_index": 217 + }, + { + "subtask": "Put the yellow build block on the front of the red build block\n", + "subtask_index": 218 + }, + { + "subtask": "Put the orange build block on the behind of the red build block\n", + "subtask_index": 219 + }, + { + "subtask": "Put the yellow build block on the behind of the green build block\n", + "subtask_index": 220 + }, + { + "subtask": "Put the blue build block on the right of orange the build block\n", + "subtask_index": 221 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 222 + }, + { + "subtask": "Put the green build block on the behind of the orange build block\n", + "subtask_index": 223 + }, + { + "subtask": "Put the green build block on the behind of the yellow build block\n", + "subtask_index": 224 + }, + { + "subtask": "Put the yellow build 
block on the right of the green build block\n", + "subtask_index": 225 + }, + { + "subtask": "Put the orange build block on the right of the green build block\n", + "subtask_index": 226 + }, + { + "subtask": "Put the orange build block on the right of the yellow build block\n", + "subtask_index": 227 + }, + { + "subtask": "Put the red build block on the green build block\n", + "subtask_index": 228 + }, + { + "subtask": "Put the red build block on the right of the blue build block\n", + "subtask_index": 229 + }, + { + "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask_index": 230 + }, + { + "subtask": "Put the orange build block on the right of the red build block\n", + "subtask_index": 231 + }, + { + "subtask": "Put the blue build block on the red build block\n", + "subtask_index": 232 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 233 + }, + { + "subtask": "Put the orange build block on the left of the yellow build block\n", + "subtask_index": 234 + }, + { + "subtask": "Put the right build block on the right of the yellow build block\n", + "subtask_index": 235 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 236 + }, + { + "subtask": "Put the orange build block on the right of the green build block\n", + "subtask_index": 237 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 238 + }, + { + "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask_index": 239 + }, + { + "subtask": "Put the orange build block on the right of the blue build block\n", + "subtask_index": 240 + }, + { + "subtask": "Put the orange build block on the behind of the blue build block\n", + "subtask_index": 241 + }, + { + "subtask": "Put the red build block on the behind of the yellow build block\n", + "subtask_index": 242 + }, + { + "subtask": "Put 
the ornage build block on the behind of the green build block\n", + "subtask_index": 243 + }, + { + "subtask": "Put the blue build block on the left of the yellow build block\n", + "subtask_index": 244 + }, + { + "subtask": "Put the green build block on the front of the blue build block\n", + "subtask_index": 245 + }, + { + "subtask": "Put the green build block on the behind of the red build block\n", + "subtask_index": 246 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 247 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 248 + }, + { + "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask_index": 249 + }, + { + "subtask": "Put the blue build block on the green build block\n", + "subtask_index": 250 + }, + { + "subtask": "Put the blue build block on the right of the orange build block", + "subtask_index": 251 + }, + { + "subtask": "Put the orange build block on the right of the green build block\n", + "subtask_index": 252 + }, + { + "subtask": "Put the orange build block on the behind of the yellow build block\n", + "subtask_index": 253 + }, + { + "subtask": "Put the green build block in the center of the table\n", + "subtask_index": 254 + }, + { + "subtask": "Put the blue build block on the behind of the orange build block\n", + "subtask_index": 255 + }, + { + "subtask": "Put the green build block on the left of the orange build block", + "subtask_index": 256 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 257 + }, + { + "subtask": "Put the green build block on the left of the blue build block\n", + "subtask_index": 258 + }, + { + "subtask": "Put the orange build block on the right of the green build block\n", + "subtask_index": 259 + }, + { + "subtask": "Put the green build block on the orange build block\n", + "subtask_index": 260 + }, + { 
+ "subtask": "Put the blue build block on the behind of the green build block\n", + "subtask_index": 261 + }, + { + "subtask": "Put the yellow build block on the behind of the red build block\n", + "subtask_index": 262 + }, + { + "subtask": "Put the orange build block on the green build block\n", + "subtask_index": 263 + }, + { + "subtask": "Put the red build block on the behind of the blue build block\n", + "subtask_index": 264 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 265 + }, + { + "subtask": "move the build block to the center of the table", + "subtask_index": 266 + }, + { + "subtask": "Put the green build block on the right of the red build block\n", + "subtask_index": 267 + }, + { + "subtask": "Put the red build block on the behind of the green build block\n", + "subtask_index": 268 + }, + { + "subtask": "Put the red build block on the behind of the blue build block\n", + "subtask_index": 269 + }, + { + "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask_index": 270 + }, + { + "subtask": "Put the red build block on the right of the orange build block\n", + "subtask_index": 271 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 272 + }, + { + "subtask": "Put the red build block on the right of the yellow build block\n", + "subtask_index": 273 + }, + { + "subtask": "Put the red build block in the center of the table\n", + "subtask_index": 274 + }, + { + "subtask": "Put the red build block on the right of the blue build block", + "subtask_index": 275 + }, + { + "subtask": "Put the yellow build block on the behind of the green build block", + "subtask_index": 276 + }, + { + "subtask": "Put the red build block on the right of the orange build block\n", + "subtask_index": 277 + }, + { + "subtask": "Put the blue build block on the right of the orange build block\n", + "subtask_index": 278 + }, + { + 
"subtask": "Put the orange build block on the behind of the green build block\n", + "subtask_index": 279 + }, + { + "subtask": "Put the blue build block on the front of the green build block\n", + "subtask_index": 280 + }, + { + "subtask": "Put the ornage build block on the behind of the blue build block\n", + "subtask_index": 281 + }, + { + "subtask": "Put the yellow build block on the behind of the orange build block\n", + "subtask_index": 282 + }, + { + "subtask": "Put the green build block on the behind of the blue build block\n", + "subtask_index": 283 + }, + { + "subtask": "Put the green build block on the left of the blue build block\n", + "subtask_index": 284 + }, + { + "subtask": "Put the blue build block in the center of the table\n", + "subtask_index": 285 + }, + { + "subtask": "Put the green build block on the right of the yellow build block\n", + "subtask_index": 286 + }, + { + "subtask": "Put the yellow build block on the behind of the blue build block", + "subtask_index": 287 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 288 + }, + { + "subtask": "Put the green build block on the blue build block\n", + "subtask_index": 289 + }, + { + "subtask": "Put the orange build block on the behind of the blue build block\n", + "subtask_index": 290 + }, + { + "subtask": "Put the red build block on the behind of the yellow build block\n", + "subtask_index": 291 + }, + { + "subtask": "Put the orange build block on the blue build block\n", + "subtask_index": 292 + }, + { + "subtask": "Put the green build block on the right of the orange build block\n", + "subtask_index": 293 + }, + { + "subtask": "Put the orange build block on the left of the red build block\n", + "subtask_index": 294 + }, + { + "subtask": "Put the green build block in the center of the table", + "subtask_index": 295 + }, + { + "subtask": "null", + "subtask_index": 296 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], 
+ "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_chest_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_chest_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 238, + "total_frames": 232528, + "fps": 30, + "total_tasks": 297, + "total_videos": 952, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 4, + "dataset_size": "2.20 GB" + }, + "frame_num": 232528, + "dataset_size": "2.20 GB", + "data_structure": "Agilex_Cobot_Magic_connect_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- 
episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (226 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_chest_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:237" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_chest_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + 
"video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + 
"task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + 
"shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, 
Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_chest_rgb/episode_000000.mp4" + }, + "Galaxea_R1_Lite_storage_object_yellow_basket": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_yellow_basket", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "yellow_basket", + "level1": "basket", + "level2": "yellow_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + "level1": "beverages", + "level2": 
"coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "building_blocks", + "level1": "toys", + "level2": "building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "kitchen_supplies", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "stationery", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick the target object and place on the yellow basket." + ], + "sub_tasks": [ + { + "subtask": "Grasp the blue pot with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the plugboard with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the round wooden block on the yellow basket with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the potato chips with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the tin on the yellow basket with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the plugboard on the yellow basket with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the peach on the yellow basket with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the blue cup on the yellow basket with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the brown towel on the yellow basket with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the compasses with the right gripper", + 
"subtask_index": 9 + }, + { + "subtask": "Place the green lemon on the yellow basket with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the duck toy on the yellow basket with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the blackboard erasure on the yellow basket with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the bread slice on the yellow basket with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the hard facial cleanser on the yellow basket with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the peach on the yellow basket with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the back scratcher with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Place the tape on the yellow basket with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Place the blue pot on the yellow basket with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Place the tape on the yellow basket with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Place the shower sphere on the yellow basket with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the yogurt on the yellow basket with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the plugboard with the right gripper", + "subtask_index": 26 + }, + { + "subtask": "Grasp the tin with the left gripper", + "subtask_index": 27 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the 
compasses on the yellow basket with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Place the hard facial cleanser on the yellow basket with the left gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 31 + }, + { + "subtask": "Place the potato chips on the yellow basket with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Grasp the chocolate with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the banana on the yellow basket with the right gripper", + "subtask_index": 34 + }, + { + "subtask": "Grasp the peach with the right gripper", + "subtask_index": 35 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Place the chocolate cake on the yellow basket with the left gripper", + "subtask_index": 37 + }, + { + "subtask": "Place the shower sphere on the yellow basket with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 39 + }, + { + "subtask": "Place the back scratcher on the yellow basket with the right gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 41 + }, + { + "subtask": "Place the blue cup on the yellow basket with the left gripper", + "subtask_index": 42 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Place the soft facial cleanser on the yellow basket with the right gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the bread slice with the right gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the potato chips with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 47 + }, + { + "subtask": "End", + "subtask_index": 48 + }, + { + "subtask": "Grasp the 
blackboard erasure with the left gripper", + "subtask_index": 49 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 50 + }, + { + "subtask": "Place the blackboard erasure on the yellow basket with the right gripper", + "subtask_index": 51 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the tin on the yellow basket with the left gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the bread slice on the yellow basket with the right gripper", + "subtask_index": 54 + }, + { + "subtask": "Place the compasses on the yellow basket with the left gripper", + "subtask_index": 55 + }, + { + "subtask": "Place the square wooden block on the yellow basket with the left gripper", + "subtask_index": 56 + }, + { + "subtask": "Place the chocolate cake on the yellow basket with the right gripper", + "subtask_index": 57 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 58 + }, + { + "subtask": "Place the peach doll on the yellow basket with the left gripper", + "subtask_index": 59 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the coke on the yellow basket with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the duck toy on the yellow basket with the left gripper", + "subtask_index": 64 + }, + { + "subtask": "Grasp the tin with the right gripper", + "subtask_index": 65 + }, + { + "subtask": "Place the round wooden block on the yellow basket with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Place the square wooden block on the yellow basket with the right gripper", + "subtask_index": 67 + }, + { + "subtask": "Grasp the tape with the right 
gripper", + "subtask_index": 68 + }, + { + "subtask": "Grasp the coke with the right gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the round bread on the yellow basket with the right gripper", + "subtask_index": 70 + }, + { + "subtask": "Grasp the tape with the left gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 72 + }, + { + "subtask": "Grasp the peach doll with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the brown towel on the yellow basket with the left gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the bread slice with the left gripper", + "subtask_index": 76 + }, + { + "subtask": "Grasp the yogurt with the left gripper", + "subtask_index": 77 + }, + { + "subtask": "Grasp the blackboard erasure with the right gripper", + "subtask_index": 78 + }, + { + "subtask": "Place the coke on the yellow basket with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Place the chocolate on the yellow basket with the right gripper", + "subtask_index": 80 + }, + { + "subtask": "Place the potato chips on the yellow basket with the right gripper", + "subtask_index": 81 + }, + { + "subtask": "Place the plugboard on the yellow basket with the right gripper", + "subtask_index": 82 + }, + { + "subtask": "Place the blue pot on the yellow basket with the left gripper", + "subtask_index": 83 + }, + { + "subtask": "Place the round bread on the yellow basket with the left gripper", + "subtask_index": 84 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 85 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 86 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 87 + }, + { + "subtask": "Grasp the soft facial cleanser with the right 
gripper", + "subtask_index": 88 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 89 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 90 + }, + { + "subtask": "Place the square chewing gum on the yellow basket with the right gripper", + "subtask_index": 91 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 92 + }, + { + "subtask": "null", + "subtask_index": 93 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 102, + "total_frames": 18153, + "fps": 30, + "total_tasks": 94, + "total_videos": 408, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + 
"camera_views": 4, + "dataset_size": "742.42 MB" + }, + "frame_num": 18153, + "dataset_size": "742.42 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_yellow_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:101" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + 
} + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + 
"dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + 
"shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei 
Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_prepare_tea": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_prepare_tea", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "small_teapot", + "level1": "teacus", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "inner_pot_of_the_teapot", + "level1": "kitchen_supplies", + "level2": "inner_pot_of_the_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tea_canister", + "level1": "tea_bags", + "level2": "tea_canister", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tea", + "level1": "tea_bags", + "level2": "tea", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "with the right hand, take out the tea leaves from the tea canister and put them into the inner pot of the teapot. with the left hand, put the inner pot of the teapot into the teapot and then close the lid of the teapot." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the tea leaves into the tea strainer with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Close the teapot lid with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the tea strainer with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "Place the tea strainer into the teapot with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the tea leaves with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 89, + "total_frames": 70954, + "fps": 
30, + "total_tasks": 8, + "total_videos": 356, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "2.99 GB" + }, + "frame_num": 70954, + "dataset_size": "2.99 GB", + "data_structure": "Airbot_MMK2_prepare_tea_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (77 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:88" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + 
"video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + 
"left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + 
"left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, 
Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "RMC-AIDA-L_organise_the_document_bag": { + "path": "RMC-AIDA-L_organise_the_document_bag", + "dataset_name": "organise_the_document_bag", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick", + "pull" + ], + "tasks": "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": 
"document_bag", + "level1": "container", + "level2": "document_bag", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-225360", + "dataset_size": "2.0GB", + "statistics": { + "total_episodes": 480, + "total_frames": 225360, + "total_tasks": 4, + "total_videos": 1440, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "327e0fe7-1be9-4724-b1ae-f511d8ab7f6b", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", + "Pulling up the zipper on the transparent plastic bag with the left gripper to close it", + "Pick up the transparent plastic bag with the left gripper", + "Pick up the transparent plastic bag with the right gripper", + "Place the transparent plastic bag", + "Place the transparent plastic bag with the right gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, 
Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_organise_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_organise_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_put_in_the_pear": { + "path": "Cobot_Magic_put_in_the_pear", + "dataset_name": "put_in_the_pear", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the high-fiber fruit", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pear", + "level1": "fruit", + "level2": 
"pear", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-21690", + "dataset_size": "461.5MB", + "statistics": { + "total_episodes": 97, + "total_frames": 21690, + "total_tasks": 1, + "total_videos": 291, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 50 + }, + "dataset_uuid": "9ef9a074-b4b1-4d16-a577-460381b81a91", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the high-fiber fruit", + "abnormal", + "Place it on the right side of the table", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, 
Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_put_in_the_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_put_in_the_pear_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galbot_g1_steamer_storage_baozi_e": { + "path": "Galbot_g1_steamer_storage_baozi_e", + "dataset_name": "steamer_storage_baozi_e", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the pot lid with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "baozi", + "level1": "food", + "level2": "baozi", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "steamer", + "level1": "cookware", + "level2": "steamer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pot_lid", + "level1": "daily_necessities", + "level2": "pot_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-627792", + "dataset_size": "11.2GB", + "statistics": { + "total_episodes": 607, + "total_frames": 627792, + "total_tasks": 1, + "total_videos": 1821, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c1e9fbc2-7775-4cc2-9a8c-d4c58aca210a", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the pot lid with left gripper", + "Abnormal", + "Place the pot lid on the steamer with left gripper", + "End", + "Place the baozi on the steamer with right gripper", + "Grasp the baozi in the plate with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Galbot_g1_steamer_storage_baozi_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_steamer_storage_baozi_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_pull_open_bag": { + "path": "RMC-AIDA-L_pull_open_bag", + "dataset_name": "pull_open_bag", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pull", + "zip", + "up" + ], + "tasks": "Grab the zipper with your left hand.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "food_bag", + "level1": "container", + "level2": "food_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "zipper", + "level1": "fastener", + "level2": "zipper", + "level3": null, + "level4": null, + 
"level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-684327", + "dataset_size": "8.3GB", + "statistics": { + "total_episodes": 1396, + "total_frames": 684327, + "total_tasks": 5, + "total_videos": 4188, + "total_chunks": 2, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4363efec-236f-4495-a86b-14c3a5d20345", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grab the zipper with your left hand.", + "Abnormal", + "Place the bag on the table with left gripper", + "Pull open the zipper with your right hand.", + "Grab the bag with your left hand.", + "Static", + "Unzip the zipper of bag with right gripper", + "End", + "Pick up the bag with left gripper", + "Pick up the bag with right gripper", + "Deliver the bag from right gripper to left gripper", + "Pull up the zipper with your right hand.", + "Discard.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye 
Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_pull_open_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── 
episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_pull_open_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n 
│ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + }, + "Airbot_MMK2_storage_peach_pear": { + "path": "Airbot_MMK2_storage_peach_pear", + "dataset_name": "Airbot_MMK2_storage_peach_pear", + "robot_type": "", + "end_effector_type": [ + "five_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp a peach with the left gripper", + "objects": [ + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pear", + "level1": "fruits", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-2804", + "dataset_size": "114.2MB", + "statistics": { + "total_episodes": 12, + "total_frames": 2804, + "total_tasks": 1, + "total_videos": 48, + "total_chunks": 1, + 
"chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c48b833c-02bd-4fd5-a2c2-5b03bf2c2936", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp a peach with the left gripper", + "Grasp a pear with the right gripper", + "Abnormal", + "Place the peach into the left compartment of the storage box with the left gripper", + "Place the pear into the right compartment of the storage box with the right gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing 
Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Airbot_MMK2_storage_peach_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"Airbot_MMK2_storage_peach_pear_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_plate_storage_doll": { + "path": "G1edu-u3_plate_storage_doll", + "dataset_name": "plate_storage_doll", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Static", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "doll", + "level1": "toys", + "level2": "doll", + 
"level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-133026", + "dataset_size": "3.7GB", + "statistics": { + "total_episodes": 388, + "total_frames": 133026, + "total_tasks": 2, + "total_videos": 1164, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "45f1baec-34a9-4f8d-bc50-3ef2086cf1a9", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Static", + "Place the pink doll into the plate with the left gripper", + "Grasp the pink doll with the right gripper", + "End", + "Place the pink doll into the plate with the right gripper", + "Grasp the pink doll with the left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, 
Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_plate_storage_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_plate_storage_doll_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_take_and_put_the_bowl": { + "path": "R1_Lite_take_and_put_the_bowl", + "dataset_name": "take_and_put_the_bowl", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place it on the table", + "objects": [ + { + "object_name": "cabinet", + "level1": "furniture", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet_door", + "level1": "furniture", + "level2": "cabinet_door", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + 
"level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-189200", + "dataset_size": "7.0GB", + "statistics": { + "total_episodes": 94, + "total_frames": 189200, + "total_tasks": 1, + "total_videos": 282, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "29eb76ea-0c49-4f11-8073-7f7501b77736", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place it on the table", + "Put it back into the cabinet", + "Take the plate from the cabinet", + "Pick up the plate from the table", + "Close the cabinet door", + "Pick up the bowl from the table", + "Take the bowl from the cabinet", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, 
Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_take_and_put_the_bowl_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_take_and_put_the_bowl_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_chop_the_scallions": { + "path": "AIRBOT_MMK2_chop_the_scallions", + "dataset_name": "chop_the_scallions", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "cut" + ], + "tasks": "Grasp the kitchen knife with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "knife", + "level1": "food", + "level2": "green_onion", + "level3": null, + "level4": null, + "level5": null + } + ], + 
"operation_platform_height": 77.2, + "frame_range": "0-14308", + "dataset_size": "573.9MB", + "statistics": { + "total_episodes": 50, + "total_frames": 14308, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "5beeb056-0ab1-4611-8791-0f80b7dc82d2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the kitchen knife with the right gripper", + "Press the scallion with the left gripper", + "Place the kitchen knife back on the knife holder with the right gripper", + "Static", + "End", + "Abnormal", + "Cut scallions with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, 
Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_chop_the_scallions_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_chop_the_scallions_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_box_storage_parcel_f": { + "path": "leju_robot_box_storage_parcel_f", + "dataset_name": "box_storage_parcel_f", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the package into the parcel locker.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + 
"level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parcel", + "level1": "container", + "level2": "parcel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-185943", + "dataset_size": "9.5GB", + "statistics": { + "total_episodes": 497, + "total_frames": 185943, + "total_tasks": 1, + "total_videos": 1491, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "52fd6211-f929-4608-b45d-501557f97fc8", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the package into the parcel locker.", + "Pick up the package from the inbound machine.", + "Pick up the package from the conveyor belt.", + "Place the package onto the inbound machine.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data 
Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_box_storage_parcel_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_f_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_place_plastic_bowl_ag": { + "path": "G1edu-u3_place_plastic_bowl_ag", + "dataset_name": "place_plastic_bowl_ag", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "pick", + "place" + ], + "tasks": "Place the plastic bowl on the table with right gripper", + "objects": [ + { + "object_name": "table", + "level1": 
"furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plastic_bowl", + "level1": "container", + "level2": "plastic_bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-7219", + "dataset_size": "116.0MB", + "statistics": { + "total_episodes": 38, + "total_frames": 7219, + "total_tasks": 1, + "total_videos": 38, + "total_chunks": 1, + "chunks_size": 39, + "fps": 30 + }, + "dataset_uuid": "200d19c3-55a3-44fd-830a-10c1b2f63ed7", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the plastic bowl on the table with right gripper", + "End", + "Place the plastic bowl on the table with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_place_plastic_bowl_ag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_place_plastic_bowl_ag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── 
data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_put_the_pillow_on_the_bed": { + "path": "R1_Lite_put_the_pillow_on_the_bed", + "dataset_name": "put_the_pillow_on_the_bed", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Pick up the pillow", + "objects": [ + { + "object_name": "pillow", + "level1": "daily_necessities", + "level2": "pillow", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bed", + "level1": "furniture", + "level2": "bed", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-53484", + "dataset_size": "1.7GB", + "statistics": { + "total_episodes": 48, + "total_frames": 53484, + "total_tasks": 1, + "total_videos": 144, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c25eff25-52c9-4178-a906-2f1040e89068", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick up the pillow", + "Place the pillow at the end of the bed", + "Move to the head of the bed", + "Move to the foot of the bed", + "Place the pillow at the bedside", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + 
"authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_put_the_pillow_on_the_bed_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ 
├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_put_the_pillow_on_the_bed_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
└── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_stack_block": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_stack_block", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "toys", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "place the square building blocks in the center with left hand and put the cylindrical building blocks on top with right hand." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the blue build blocks with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the green build blocks with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the blue build blocks on the Mini table with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the green build blocks on the blue build block with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the blue diamond shaped build blocks with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Grasp the red build blocks with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the blue diamond shaped build blocks on the glasses case with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the purple build blocks on the red build block with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the green build blocks on the center of the table with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the green build blocks with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the glasses case on the red and green build blocks with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the purple build blocks with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the blue build blocks on the yellow build block with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the glasses case with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the green cylindrical build blocks on the green build blocks with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "End", + "subtask_index": 15 + }, + { + "subtask": "Place the blue build blocks on the pink build block with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the red build blocks on the blue build block with the right gripper", + "subtask_index": 
17 + }, + { + "subtask": "Grasp the green cylindrical build blocks with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "null", + "subtask_index": 19 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 236, + "total_frames": 60925, + "fps": 30, + "total_tasks": 20, + "total_videos": 944, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "2.00 GB" + }, + "frame_num": 60925, + "dataset_size": "2.00 GB", + "data_structure": "Airbot_MMK2_stack_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (224 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:235" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { 
+ "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + 
"left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + 
"dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun 
Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_diamond_ring": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_diamond_ring", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "diamond_ring", + "level1": "daily_necessities", + "level2": "diamond_ring", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "flip_top_paper_boxes", + "level1": "packaging", + "level2": "flip_top_paper_boxes", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the ring into the box." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the diamond ring in the box with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Close the lid of the box with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the diamond ring with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "place", + "pick", + "turn" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 9131, + "fps": 30, + "total_tasks": 5, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "426.97 MB" + }, + "frame_num": 9131, + "dataset_size": "426.97 MB", + 
"data_structure": "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + 
"video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + 
"eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai 
Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_block_BBs": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_block_BBs", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "scene_level1", + "level2": "scene_level2", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": null, + "level1": "bb_pellets", + "level2": "ball", + "level3": "bb_pellets", + "level4": null, + "level5": null + }, + { + "object_name": null, + "level1": "bowl", + "level2": "bowl", + "level3": "bowl", + "level4": null, + "level5": null + }, + { + "object_name": null, + "level1": "building_blocks", + "level2": "rectangular_building_blocks", + "level3": "building_blocks", + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the bb pellets and rectangular building blocks into the bowl." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the green cuboid block into the bowl with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the green cuboid block with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the bullet with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the bullet into the bowl with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "pick", + "clip", + "place", + "lift" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 48, + "total_frames": 8640, + "fps": 30, + "total_tasks": 6, + "total_videos": 192, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + 
"camera_views": 4, + "dataset_size": "282.66 MB" + }, + "frame_num": 8640, + "dataset_size": "282.66 MB", + "data_structure": "Airbot_MMK2_storage_block_BBs_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:47" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "RMC-AIDA-L_stir_coffee": { + "path": "RMC-AIDA-L_stir_coffee", + "dataset_name": "stir_coffee", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "take", + "put", + "move", + "stir" + ], + "tasks": "Stir the coffee in the cup with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "tableware", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", 
+ "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-555807", + "dataset_size": "3.6GB", + "statistics": { + "total_episodes": 767, + "total_frames": 555807, + "total_tasks": 8, + "total_videos": 2301, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "ee676d19-f3d4-4c11-bb83-9a2474cf77c5", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Stir the coffee in the cup with the right gripper", + "Stir the coffee with right gripper", + "Abnormal", + "Move the cup with coffee to the center of view with the right gripper", + "Static", + "Move the cup in the center of the table with right gripper", + "Grasp the spoon with left gripper", + "Move the cup with coffee to the center of view with the left gripper", + "Stir the coffee in the cup with the left gripper", + "Grasp the spoon with right gripper", + "End", + "Stir the coffee with left gripper", + "Move the cup in the center of the table with left gripper", + "Pick up the spoon with the left gripper", + "Pick up the spoon with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": 
"https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_stack_cubic_block": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + 
"configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_stack_cubic_block", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the building blocks with left and right hands respectively and place them on the blocks." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the blue build blocks with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Place the blue build blocks on the red build block with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the yellow build blocks on the orange build block with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the red build blocks on the center of the table with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the orange build blocks on the yellow build block with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the blue build blocks with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the red build blocks with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Abnormal", + "subtask_index": 8 + }, + { + "subtask": "Grasp the orange build blocks with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the blue build blocks on the red build block with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the yellow build blocks with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "null", + "subtask_index": 12 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": 
"dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 162, + "total_frames": 48691, + "fps": 30, + "total_tasks": 13, + "total_videos": 648, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "1.92 GB" + }, + "frame_num": 48691, + "dataset_size": "1.92 GB", + "data_structure": "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(150 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:161" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "G1edu-u3_bowl_storage_grape_singletry": { + "path": "G1edu-u3_bowl_storage_grape_singletry", + "dataset_name": "bowl_storage_grape_singletry", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the grapes with left hand", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-119619", + "dataset_size": "2.4GB", + "statistics": { + "total_episodes": 242, + "total_frames": 119619, + "total_tasks": 1, + "total_videos": 726, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "40947b8c-339b-414f-94d9-6d5f24520362", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the grapes with left hand", + "Grasp the bread with the left gripper", + "Static", + "End", + "Place the bread the bowl with the left gripper", + "Place the grapes in the plate with left hand", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + 
"eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": 
"G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_fold_towel_grey_tray": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_fold_towel_grey_tray", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grey_square_towel", + "level1": "daily_necessities", + "level2": "grey_square_towel", + "level3": null, + "level4": null, + 
"level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a gripper to place the folded towel on the tray." + ], + "sub_tasks": [ + { + "subtask": "Fold the grey towel from left to right with left gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the folded grey towel on the tray with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Abnormal", + "subtask_index": 2 + }, + { + "subtask": "Fold the grey towel from right to left with right gripper", + "subtask_index": 3 + }, + { + "subtask": "Fold the grey towel upwards", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Place the folded grey towel on the tray with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "fold" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + 
"eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 53308, + "fps": 30, + "total_tasks": 8, + "total_videos": 150, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "724.56 MB" + }, + "frame_num": 53308, + "dataset_size": "724.56 MB", + "data_structure": "Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "RMC-AIDA-L_fruit_storage": { + "path": "RMC-AIDA-L_fruit_storage", + "dataset_name": "fruit_storage", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "fruit", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-291785", + "dataset_size": "2.6GB", + "statistics": { + "total_episodes": 521, + "total_frames": 291785, + "total_tasks": 2, + "total_videos": 1563, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "8276223a-988a-4a97-8459-97f7c498cbb5", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Place the grape in the purple plate with left gripper", + "Grasp the grape with right gripper", + "Place the banana in the purple plate with right gripper", + "Place the banana in the blue bowl with left gripper", + "Grasp the banana with right gripper", + "Grasp 
the grape with left gripper", + "Place the bowl on the table with right gripper", + "Place the grape in the purple plate with right gripper", + "Place the banana in the purple plate with left gripper", + "Lift the blue bowl with right gripper", + "Grasp the blue bowl with right gripper", + "Static", + "Abnormal", + "Grasp the banana with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, 
Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_fruit_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_fruit_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_pick_up_the_bottled_water_as": { + "path": "G1edu-u3_pick_up_the_bottled_water_as", + "dataset_name": "pick_up_the_bottled_water_as", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "Grasp the water bottle and lift it to the center of the view with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bottle", + "level1": "container", + "level2": "bottle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water", + "level1": "beverages", + "level2": "water", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-9753", + "dataset_size": "116.6MB", + "statistics": { + "total_episodes": 24, + "total_frames": 9753, + "total_tasks": 1, + "total_videos": 24, + "total_chunks": 1, + "chunks_size": 24, + "fps": 30 + }, + "dataset_uuid": "a5f8649a-a1ab-415a-9fd4-b48b04fab77e", + "language": [ + "en", + 
"zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the water bottle and lift it to the center of the view with right gripper", + "Grasp the water bottle and lift it to the center of the view with left gripper", + "End", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin 
Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_up_the_bottled_water_as_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_up_the_bottled_water_as_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + 
"G1edu-u3_pick_up_the_tissue_box_ao": { + "path": "G1edu-u3_pick_up_the_tissue_box_ao", + "dataset_name": "pick_up_the_tissue_box_ao", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "Grasp the tissue box and lift it to the center of the view with both gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tissue", + "level1": "daily_necessities", + "level2": "tissue", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-21166", + "dataset_size": "273.8MB", + "statistics": { + "total_episodes": 29, + "total_frames": 21166, + "total_tasks": 1, + "total_videos": 29, + "total_chunks": 1, + "chunks_size": 29, + "fps": 30 + }, + "dataset_uuid": "3adf4438-1317-4ffb-9447-b4dd516c8559", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the tissue box and lift it to the center of the view with both gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", 
+ "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_up_the_tissue_box_ao_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_up_the_tissue_box_ao_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_move_cake_tape_measure": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_move_cake_tape_measure", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "other", + "level2": "courier_station", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_boxes", + "level1": "home_storage", + "level2": "paper_boxes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape_measure", + "level1": "stationery", + "level2": "tape_measure", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the cake and tape measure with both hands and place them on the cardboard box." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the tape measure with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the bagged waffle on the carton with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the bagged waffle with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the tape measure on the carton with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 49, + "total_frames": 9381, + "fps": 30, + "total_tasks": 6, + "total_videos": 196, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 
4, + "dataset_size": "362.05 MB" + }, + "frame_num": 9381, + "dataset_size": "362.05 MB", + "data_structure": "Airbot_MMK2_move_cake_tape_measure_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + 
"shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + 
"right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + 
"right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, 
Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_take_dog_doll": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_take_dog_doll", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "toy_dog", + "level1": "doll", + "level2": "toy_dog", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "storage_utensils", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the toy dog from the white lid by hand and place it on the table." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the puppy on the table with the left and right grippers", + "subtask_index": 0 + }, + { + "subtask": "Grasp the puppy with the left and right grippers", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "null", + "subtask_index": 3 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 6425, + "fps": 30, + "total_tasks": 4, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "253.22 MB" + }, + "frame_num": 6425, + "dataset_size": "253.22 MB", + "data_structure": "Airbot_MMK2_take_dog_doll_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": 
"yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + 
"left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + 
"dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Galaxea_R1_Lite_storage_object_brown_basket": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_brown_basket", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_basket", + "level1": "basket", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + "level1": "beverages", + "level2": 
"coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "rulers", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "building_blocks", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "spoons_and_spatulas", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "erasers", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": 
null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "tape", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick the target object and place on the brown basket." + ], + "sub_tasks": [ + { + "subtask": "Grasp the blue pot with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the plugboard with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the potato chips into the basket with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the banana into the basket with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the coke into the basket with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the brown towel into the basket with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the peach slice into the basket with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the banana with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the round wooden block into the basket with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the duck toy into the basket with the right gripper", + "subtask_index": 9 + 
}, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the blackboard erasure into the basket with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the plugboard into the basket with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the square chewing gum into the basket with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the cookie with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the blackboard erasure into the basket with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the hard facial cleanser into the basket with the left gripper", + "subtask_index": 18 + }, + { + "subtask": "Place the round bread into the basket with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Place the square wooden block into the basket with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the soft facial cleanser with the left gripper", + "subtask_index": 21 + }, + { + "subtask": "Place the blue pot into the basket with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the peach slice with the right gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the soft facial cleanser into the basket with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the square chewing gum with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Place the shower sphere into the basket with the left gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the yogurt into the basket with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the round 
wooden block into the basket with the left gripper", + "subtask_index": 29 + }, + { + "subtask": "Place the back scratcher into the basket with the left gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 31 + }, + { + "subtask": "Grasp the yogurt with the right gripper", + "subtask_index": 32 + }, + { + "subtask": "Grasp the tin with the left gripper", + "subtask_index": 33 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 34 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 35 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Grasp the back scratcher with the left gripper", + "subtask_index": 37 + }, + { + "subtask": "Place the hard facial cleanser into the basket with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 39 + }, + { + "subtask": "Place the green lemon into the basket with the right gripper", + "subtask_index": 40 + }, + { + "subtask": "Place the banana into the basket with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "Place the compasses into the basket with the right gripper", + "subtask_index": 42 + }, + { + "subtask": "Place the compasses into the basket with the left gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the bread slice with the right gripper", + "subtask_index": 45 + }, + { + "subtask": "Place the duck toy into the basket with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Place the chocolate cake into the basket with the right gripper", + "subtask_index": 47 + }, + { + "subtask": "Place the shower sphere into the basket with the right gripper", + "subtask_index": 48 + }, + { + "subtask": 
"Grasp the potato chips with the left gripper", + "subtask_index": 49 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 50 + }, + { + "subtask": "End", + "subtask_index": 51 + }, + { + "subtask": "Place the cookie into the basket with the right gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the tape into the basket with the right gripper", + "subtask_index": 53 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 54 + }, + { + "subtask": "Place the bread slice into the basket with the left gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 56 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the tape into the basket with the left gripper", + "subtask_index": 58 + }, + { + "subtask": "Place the blue cup into the basket with the left gripper", + "subtask_index": 59 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 62 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the blue pot into the basket with the right gripper", + "subtask_index": 64 + }, + { + "subtask": "Grasp the tin with the right gripper", + "subtask_index": 65 + }, + { + "subtask": "Place the tin into the basket with the left gripper", + "subtask_index": 66 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 67 + }, + { + "subtask": "Place the square chewing gum into the basket with the left gripper", + "subtask_index": 68 + }, + { + "subtask": "Place the bread slice into the basket with the right gripper", + "subtask_index": 69 + }, + { + "subtask": "Grasp the 
tape with the left gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the blue cup into the basket with the right gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 72 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 73 + }, + { + "subtask": "Place the chocolate cake into the basket with the left gripper", + "subtask_index": 74 + }, + { + "subtask": "Grasp the bread slice with the left gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the blackboard erasure with the right gripper", + "subtask_index": 76 + }, + { + "subtask": "Place the round bread into the basket with the right gripper", + "subtask_index": 77 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 79 + }, + { + "subtask": "Place the brown towel into the basket with the left gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 81 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 82 + }, + { + "subtask": "Place the square wooden block into the basket with the left gripper", + "subtask_index": 83 + }, + { + "subtask": "Place the tin into the basket with the right gripper", + "subtask_index": 84 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 85 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 86 + }, + { + "subtask": "null", + "subtask_index": 87 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + 
"cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 102, + "total_frames": 18209, + "fps": 30, + "total_tasks": 88, + "total_videos": 408, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "899.78 MB" + }, + "frame_num": 18209, + "dataset_size": "899.78 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_brown_basket_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| 
|-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:101" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": 
"float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + 
"right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": 
"This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": 
"data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, + "Agilex_Cobot_Magic_fold_towel_pink_tray": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_fold_towel_pink_tray", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_square_towel", + "level1": "daily_necessities", + "level2": "pink_square_towel", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot 
provide the operation type information.", + "task_instruction": [ + "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a gripper to place the folded towel on the tray." + ], + "sub_tasks": [ + { + "subtask": "Fold the pink towel upwards", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Place the folded pink towel on the tray with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Fold the pink towel from right to left with right gripper", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", "fold" ], "robot_name": [ @@ -74085,23 +78756,5163 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 63678, + "total_episodes": 50, + "total_frames": 63678, + "fps": 30, + "total_tasks": 5, + "total_videos": 150, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "766.05 MB" + }, + "frame_num": 63678, + "dataset_size": "766.05 MB", + "data_structure": "Agilex_Cobot_Magic_fold_towel_pink_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "Agilex_Cobot_Magic_fold_towel": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_fold_towel", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_towel", + "level1": "daily_necessities", + "level2": "white_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange_towel", + "level1": "daily_necessities", + "level2": "orange_towel", + "level3": null, + "level4": null, + 
"level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "fold the towels on the table." + ], + "sub_tasks": [ + { + "subtask": "Fold the white towel upwards with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the orange towel with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the orange towel with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Fold the orange towel upwards with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Fold the white towel upwards with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "Fold the orange towel from right to left with right gripper", + "subtask_index": 5 + }, + { + "subtask": "Press on the orange towel with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Fold the white towel from right to left with right gripper", + "subtask_index": 7 + }, + { + "subtask": "End", + "subtask_index": 8 + }, + { + "subtask": "Arrange the orange towel with the right hand", + "subtask_index": 9 + }, + { + "subtask": "Grasp the white towel with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Fold the orange towel upwards with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Grasp the white towel with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Press on the white towel with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "null", + "subtask_index": 14 + } + ], + "atomic_actions": [ + "grasp", + "fold", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, 
shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 100, + "total_frames": 76161, + "fps": 30, + "total_tasks": 15, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "834.61 MB" + }, + "frame_num": 76161, + "dataset_size": "834.61 MB", + "data_structure": "Agilex_Cobot_Magic_fold_towel_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:99" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_sweep_peaper": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_sweep_peaper", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "paper_all", + "level1": "trash_bag", + "level2": "paper_all", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_broom", + "level1": "cleaning_supplies", + "level2": "small_broom", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "dustpan", + "level1": "cleaning_supplies", + "level2": "dustpan", + "level3": null, + "level4": null, + "level5": null + } + ], 
+ "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "hold the dustpan with left hand and the broom with right hand, and sweep the paper ball into the dustpan." + ], + "sub_tasks": [ + { + "subtask": "Place the broom on the table with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Grasp the broom from the table with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the dustpan from the table with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the dustpan on the table with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "Abnormal", + "subtask_index": 5 + }, + { + "subtask": "Sweep the waste paper into the dustpan", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "push" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + 
"eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 98, + "total_frames": 43106, + "fps": 30, + "total_tasks": 8, + "total_videos": 392, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "1.75 GB" + }, + "frame_num": 43106, + "dataset_size": "1.75 GB", + "data_structure": "Airbot_MMK2_sweep_peaper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:97" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Split_aloha_basket_storage_orange": { + "path": "Split_aloha_basket_storage_orange", + "dataset_name": "basket_storage_orange", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Place the basket in the center of view with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-293847", + "dataset_size": "6.1GB", + "statistics": { + "total_episodes": 642, + "total_frames": 293847, + "total_tasks": 1, + "total_videos": 1926, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "f478bc5f-0850-4241-9c9b-cea1674f8e67", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the basket in the center of view with left gripper", + "Place the orange in the basket with right gripper", + "Grasp the orange with right gripper", + "Grasp the basket with left gripper", + "End", + "Abnormal", + "Static", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": 
"auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + 
"depth_enabled": false, + "data_schema": "Split_aloha_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_swap_bbs_block_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_swap_bbs_block_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "bb_pellets", + "level1": "toys", + "level2": "bb_pellets", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "toys", + "level2": "block_pillar", + "level3": null, + "level4": null, 
+ "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "take the bb balls out of the plate with left hand and put them back in with right hand." + ], + "sub_tasks": [ + { + "subtask": "Place the green rectangular block on the plate with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the bullet on the table with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Graasp the green rectangular block with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Abnormal", + "subtask_index": 3 + }, + { + "subtask": "Graasp the bullet on the plate and with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + 
"gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 9449, + "fps": 30, + "total_tasks": 7, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "295.93 MB" + }, + "frame_num": 9449, + "dataset_size": "295.93 MB", + "data_structure": "Airbot_MMK2_swap_bbs_block_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storge_cake_ice_cream": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storge_cake_ice_cream", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ice_cream", + "level1": "food", + "level2": "ice_cream", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "hold the cake in left hand and the ice cream in right hand at the same time. put the cake into the bowl and place the ice cream on the plate." + ], + "sub_tasks": [ + { + "subtask": "Place the ice cream into the plate with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the ice cream with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the cake with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the cake into the bowl with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Static", + "subtask_index": 4 + }, + { + "subtask": "Grasp the cake from the table and with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "End", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": 
"right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 48, + "total_frames": 8733, + "fps": 30, + "total_tasks": 8, + "total_videos": 192, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "351.41 MB" + }, + "frame_num": 8733, + "dataset_size": "351.41 MB", + "data_structure": "Airbot_MMK2_storge_cake_ice_cream_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:47" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "R1_Lite_build_blocks": { + "path": "R1_Lite_build_blocks", + "dataset_name": "build_blocks", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Place the arch-shaped block in the center of view with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blocks", + "level1": "toy", + "level2": "blocks", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-59296", + "dataset_size": "1.8GB", + "statistics": { + "total_episodes": 66, + "total_frames": 59296, + "total_tasks": 1, + "total_videos": 264, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "a56eff90-9eaa-4804-91c6-27e733d07c46", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the arch-shaped block in the center of view with the left gripper", + "Grasp the arch-shaped block with the left gripper", + "Place the triangle-shaped block onto the box-shaped block with the left grippe", + "Grasp the box-shaped block with the right gripper", + "Place the box-shaped block onto the arch-shaped block with the right gripper", + "Static", + "End", + "Grasp the triangle-shaped block with the left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": 
"auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_build_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_build_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_pick_empty_bottle_ab": { + "path": "G1edu-u3_pick_empty_bottle_ab", + "dataset_name": "pick_empty_bottle_ab", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "empty_bottle", + "level1": "mineral_water", + "level2": "empty_bottle", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-3290", + "dataset_size": "45.4MB", + "statistics": { + "total_episodes": 11, + "total_frames": 3290, + "total_tasks": 1, + "total_videos": 11, + "total_chunks": 1, + "chunks_size": 11, + "fps": 30 + }, + "dataset_uuid": "e8eb32c9-9e05-4189-9718-badce6a3ac0b", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the cola bottle and lift it to the center of the view with right gripper ", + "Grasp the cola bottle and lift it to the center of the view with left gripper  ", + "Abnormal", 
+ "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and 
Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_empty_bottle_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_empty_bottle_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_open_and_close_microwave_oven": { + "path": "R1_Lite_open_and_close_microwave_oven", + "dataset_name": "open_and_close_microwave_oven", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + 
"push", + "pull", + "pressbutton" + ], + "tasks": "Put the microwave back in its original place", + "objects": [ + { + "object_name": "french_fries", + "level1": "food", + "level2": "french_fries", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "microwave", + "level1": "electric_appliance", + "level2": "microwave", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-183612", + "dataset_size": "7.7GB", + "statistics": { + "total_episodes": 117, + "total_frames": 183612, + "total_tasks": 1, + "total_videos": 351, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "1b797b37-4aab-43be-9f02-cfcf030be8ff", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Put the microwave back in its original place", + "Take the plate out of the microwave", + "Close the microwave oven", + "Open the microwave oven", + "Put the plate in the microwave oven", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": 
"@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_open_and_close_microwave_oven_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_open_and_close_microwave_oven_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_slide_block_onto_post": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By 
accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_slide_block_onto_post", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "early_education_toys", + "level1": "toys", + "level2": "early_education_toys", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the toy back to its original position with right hand." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the red cylindrical build blocks with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Place the green cylindrical build blocks on the 1st pillar of the block base with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the red cylindrical build blocks on the 1st pillar of the block base with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "Grasp the green cylindrical build blocks with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 189, + 
"total_frames": 36378, + "fps": 30, + "total_tasks": 7, + "total_videos": 756, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "1.31 GB" + }, + "frame_num": 36378, + "dataset_size": "1.31 GB", + "data_structure": "Airbot_MMK2_slide_block_onto_post_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(177 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:188" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Cobot_Magic_pull_zipper": { + "path": "Cobot_Magic_pull_zipper", + "dataset_name": "pull_zipper", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pull" + ], + "tasks": "Pull the zipper with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bag", + "level1": "container", + "level2": "bag", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-836317", + "dataset_size": "11.0GB", + "statistics": { + "total_episodes": 1868, + "total_frames": 836317, + "total_tasks": 6, + "total_videos": 5604, + "total_chunks": 2, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "3ede76ea-63fb-4de1-a796-a3809b4e09fa", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pull the zipper with right gripper", + "Pull open the zipper with your left hand.", + "Grasp the bag with left gripper", + "Hold the bag with your right hand.", + "End", + "Zip up the bag with your left hand.", + "Zip up the bag with your right hand.", + "Grasp the bag with right gripper", + "Pull the zipper with left gripper", + "Grab the zipper with your left hand.", + "Discard.", + "Pull open the zipper with your right hand.", + "Hold the bag with your left hand.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": 
"auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_pull_zipper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n 
├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", + "structure": "Cobot_Magic_pull_zipper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── 
episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + }, + "Airbot_MMK2_swap_bread_cake_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_swap_bread_cake_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "bread", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "take the cake out of the plate with left hand and put the bread in with right hand." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the bread with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the cake on the table with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Abnormal", + "subtask_index": 2 + }, + { + "subtask": "Place the bread into the plate with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "Grasp the cake on the plate with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 41, + "total_frames": 11322, + "fps": 30, + "total_tasks": 7, + "total_videos": 164, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + 
"action_dim": 36, + "camera_views": 4, + "dataset_size": "475.32 MB" + }, + "frame_num": 11322, + "dataset_size": "475.32 MB", + "data_structure": "Airbot_MMK2_swap_bread_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:40" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_potato_pumpkin": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_potato_pumpkin", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "potato", + "level1": "vegetables", + "level2": "potato", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pumpkin", + "level1": "vegetables", + "level2": "pumpkin", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the potato with left hand and put it in the storage box, and pick up the pumpkin with right hand and put it in the storage box." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the pumpkin with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the potato with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the potato into the left compartment of the storage box with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "End", + "subtask_index": 3 + }, + { + "subtask": "Place the pumpkin into the right compartment of the storage box with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "pick", + "place", + "grasp" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 41, + "total_frames": 9965, + "fps": 30, + "total_tasks": 6, + "total_videos": 164, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, 
+ "action_dim": 36, + "camera_views": 4, + "dataset_size": "288.93 MB" + }, + "frame_num": 9965, + "dataset_size": "288.93 MB", + "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_potato_pumpkin_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:40" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "G1edu-u3_pullBowl_storage_bread_unordered_b": { + "path": "G1edu-u3_pullBowl_storage_bread_unordered_b", + "dataset_name": "pullBowl_storage_bread_unordered_b", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "receive" + ], + "tasks": "Abnormal", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "faucet", + "level1": "water_dispenser", + "level2": "faucet", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": 
"cup", + "level1": "kitchen_supplies", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange_juice", + "level1": "drink", + "level2": "orange_juice", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cola", + "level1": "beverages", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sprite", + "level1": "beverages", + "level2": "sprite", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-118843", + "dataset_size": "2.3GB", + "statistics": { + "total_episodes": 190, + "total_frames": 118843, + "total_tasks": 1, + "total_videos": 570, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a50", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", + "Place the long bread in pink bowl with left hand", + "End", + "Grasp the round bread with left hand", + "Grasp the long bread with left hand", + "Place the round bread in pink bowl with left hand", + "Move the pink bowl to the center of table with right hand", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": 
"apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pullBowl_storage_bread_unordered_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_place_the_cake": { + "path": "AIRBOT_MMK2_place_the_cake", + "dataset_name": "place_the_cake", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + 
"tasks": "Place the cake on the yellow cube block with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "building_blocks", + "level1": "toy", + "level2": "building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ice_cream", + "level1": "food", + "level2": "ice_cream", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_basin", + "level1": "container", + "level2": "small_basin", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "container", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "swiss_roll", + "level1": "food", + "level2": "swiss_roll", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-50001", + "dataset_size": "2.3GB", + "statistics": { + "total_episodes": 249, + "total_frames": 50001, + "total_tasks": 5, + "total_videos": 996, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "9dd9223d-2d05-47a8-bfd0-0884a7f571ed", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the cake on the yellow cube block with the left gripper", + "Grasp the bagged cake with the left gripper", + "Grasp the cake on the plate with the right gripper", + "Place the cake on the table with the right gripper", + "Abnormal", + "Static", + "Grasp the cake with the left gripper", + "Place the ice cream into the white basket with the right gripper", + "Place the cake into the 
white basket with the left gripper", + "Place the ice cream into the white basket with the left gripper", + "Grasp the ice cream with the right gripper", + "Grasp the ice cream with the left gripper", + "Grasp the bagged cake with the right gripper", + "Place the cake into the white plate with the left gripper", + "Place the bagged cake on the white plate with the left gripper", + "End", + "Grasp the cake with the right gripper", + "Place the bagged cake on the white plate with the right gripper", + "Place the cake on the blue cube block with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai 
Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_move_object_beige_tablecloth": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_move_object_beige_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial & convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": 
"chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": "Fruit cake", + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beef_cheeseburger", + "level1": "food", + "level2": "beef_cheeseburger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pan", + "level1": "kitchen_supplies", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teapot", + "level1": "kitchen_supplies", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teacup", + "level1": "kitchen_supplies", + "level2": "small_teacup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_ball", + "level1": "trash", + "level2": "paper_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_square_towel", + "level1": "daily_necessities", + "level2": "brown_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_cylindrical_pen_holder", + "level1": 
"stationery", + "level2": "black_cylindrical_pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_long_towel", + "level1": "daily_necessities", + "level2": "pink_long_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_long_towel", + "level1": "daily_necessities", + "level2": "blue_long_towel", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the gripper move the object." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the XX with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the XX on the table with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the XX on the table with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 199, + "total_frames": 103966, "fps": 30, - "total_tasks": 5, - "total_videos": 150, + "total_tasks": 6, + "total_videos": 597, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "766.05 MB" + "dataset_size": "1.34 GB" }, - "frame_num": 63678, - "dataset_size": "766.05 MB", - "data_structure": 
"Agilex_Cobot_Magic_fold_towel_pink_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 103966, + "dataset_size": "1.34 GB", + "data_structure": "Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(187 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:198" }, "features": { "observation.images.cam_head_rgb": { @@ -74210,7 +84021,556 @@ "action": { "dtype": "float32", "shape": [ - 26 + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", 
+ "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, 
+ "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv 
preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_take_block_both_hands": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_take_block_both_hands", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_boxes", + "level1": "home_storage", + "level2": "paper_boxes", + 
"level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the building blocks simultaneously with both hands and take them out of the paper box." + ], + "sub_tasks": [ + { + "subtask": "End", + "subtask_index": 0 + }, + { + "subtask": "Grasp the blue cube block on the paper box with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Static", + "subtask_index": 2 + }, + { + "subtask": "Grasp the green cube block on the paper box with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the green cube block on the table with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the blue cube block on the table with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + 
"eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 47, + "total_frames": 4793, + "fps": 30, + "total_tasks": 7, + "total_videos": 188, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "221.66 MB" + }, + "frame_num": 4793, + "dataset_size": "221.66 MB", + "data_structure": "Airbot_MMK2_take_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:46" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 ], "names": [ "left_arm_joint_1_rad", @@ -74219,26 +84579,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", 
+ "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -74278,17 +84648,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -74305,10 +84675,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -74325,130 +84695,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, 
- "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -74470,9 +84780,130 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_fold_towel": { + "Split_aloha_pour_rice": { + "path": "Split_aloha_pour_rice", + "dataset_name": "pour_rice", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp", + "pour" + ], + "tasks": "Grasp the cup with rice in it with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rice", + "level1": "food", + "level2": "rice", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rice_container", + "level1": "container", + "level2": "rice_container", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-504182", 
+ "dataset_size": "4.9GB", + "statistics": { + "total_episodes": 740, + "total_frames": 504182, + "total_tasks": 1, + "total_videos": 2220, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "004a37ce-a00a-4547-a5a0-0538af2c8a99", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the cup with rice in it with left gripper", + "Place the bowl in the center of view with the right gripper", + "End", + "Place the cup on the table with the left gripper", + "Static", + "Pick up the bowl with the right gripper", + "Abnormal", + "Move the bowl in the center of view with right gripper", + "Move the bowl to the center of view with the right gripper", + "Pick up the cup with rice in it with the left gripper", + "Pour the rice from the cup into the bowl with the left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, 
Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Split_aloha_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_mix_blue_yellow_right": { "task_categories": [ "robotics" ], @@ -74502,7 +84933,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_fold_towel", + "dataset_name": "Galaxea_R1_Lite_mix_blue_yellow_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "office_workspace", @@ -74514,25 +84945,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "blue_pigment", + "level1": "materials", + "level2": 
"blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "white_towel", - "level1": "daily_necessities", - "level2": "white_towel", + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "orange_towel", - "level1": "daily_necessities", - "level2": "orange_towel", + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tubes", + "level1": "laboratory_supplies", + "level2": "test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null @@ -74540,96 +84995,106 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "fold the towels on the table." + "pick up the test tube with yellow pigment and the test tube with blue pigment by grippers and pour them into the beaker." 
], "sub_tasks": [ { - "subtask": "Fold the white towel upwards with the right gripper", + "subtask": "Abnormal", "subtask_index": 0 }, { - "subtask": "Grasp the orange towel with the left gripper", + "subtask": "Place the test tube into the bowl with right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the orange towel with the right gripper", + "subtask": "Grasp the red reagent with the left gripper", "subtask_index": 2 }, { - "subtask": "Fold the orange towel upwards with the right gripper", + "subtask": "Pour the blue reagent into the graduated cylinder with right gripper", "subtask_index": 3 }, { - "subtask": "Fold the white towel upwards with the left gripper", + "subtask": "Pick up the test tube containing the yellow reagent with right gripper", "subtask_index": 4 }, { - "subtask": "Fold the orange towel from right to left with right gripper", + "subtask": "Grasp the yellow reagent with the right gripper", "subtask_index": 5 }, { - "subtask": "Press on the orange towel with the left gripper", + "subtask": "Grasp the red reagent with the right gripper", "subtask_index": 6 }, { - "subtask": "Fold the white towel from right to left with right gripper", + "subtask": "Place the test tube into the bowl with the right gripper", "subtask_index": 7 }, { - "subtask": "End", + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", "subtask_index": 8 }, { - "subtask": "Arrange the orange towel with the right hand", + "subtask": "End", "subtask_index": 9 }, { - "subtask": "Grasp the white towel with the right gripper", + "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", "subtask_index": 10 }, { - "subtask": "Fold the orange towel upwards with the left gripper", + "subtask": "Pour the yellow reagent into the graduated cylinder with right gripper", "subtask_index": 11 }, { - "subtask": "Grasp the white towel with the left gripper", + "subtask": "Pick up the test tube containing the blue reagent 
with right gripper", "subtask_index": 12 }, { - "subtask": "Press on the white towel with the left gripper", + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", "subtask_index": 13 }, { - "subtask": "null", + "subtask": "end", "subtask_index": 14 + }, + { + "subtask": "Grasp the blue reagent with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "null", + "subtask_index": 16 } ], "atomic_actions": [ "grasp", - "fold", - "lift", - "lower" + "pick", + "place", + "pour" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -74640,30 +85105,30 @@ 
"subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 100, - "total_frames": 76161, + "total_episodes": 50, + "total_frames": 32657, "fps": 30, - "total_tasks": 15, - "total_videos": 300, + "total_tasks": 17, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "834.61 MB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "665.69 MB" }, - "frame_num": 76161, - "dataset_size": "834.61 MB", - "data_structure": "Agilex_Cobot_Magic_fold_towel_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 32657, + "dataset_size": "665.69 MB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:99" + "train": "0:49" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -74672,8 +85137,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -74685,7 +85173,7 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -74695,7 +85183,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -74708,7 +85196,7 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -74718,7 +85206,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -74731,7 +85219,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -74740,32 +85228,20 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - 
"left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -74774,26 +85250,14 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -74945,6 +85409,26 @@ ], "dtype": "int32" }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, "gripper_mode_state": { "names": [ "left_gripper_mode", @@ -74984,26 +85468,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - 
"left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] } }, "authors": { @@ -75025,9 +85489,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_sweep_peaper": { + "Airbot_MMK2_storage_milk_tissue": { "task_categories": [ "robotics" ], @@ -75057,11 +85521,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_sweep_peaper", + "dataset_name": "Airbot_MMK2_storage_milk_tissue", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -75069,25 +85533,33 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "paper_all", - "level1": "trash_bag", - "level2": "paper_all", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "small_broom", - "level1": "cleaning_supplies", - "level2": "small_broom", + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "dustpan", - "level1": "cleaning_supplies", - "level2": "dustpan", + "object_name": "milk", + "level1": "beverages", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tissue_paper", + "level1": "paper_towels", + "level2": "tissue_paper", "level3": null, "level4": null, "level5": null @@ -75095,35 +85567,35 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot 
provide the operation type information.", "task_instruction": [ - "hold the dustpan with left hand and the broom with right hand, and sweep the paper ball into the dustpan." + "pick up the tissue with left hand and put it in the basket, then pick up the milk with right hand and put it in the basket." ], "sub_tasks": [ { - "subtask": "Place the broom on the table with the right gripper", + "subtask": "Place the milk on the white basket with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the tissue on the white basket with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the broom from the table with the right gripper", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Grasp the dustpan from the table with the left gripper", + "subtask": "Static", "subtask_index": 3 }, { - "subtask": "Place the dustpan on the table with the left gripper", + "subtask": "Grasp the milk with the right gripper", "subtask_index": 4 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 5 }, { - "subtask": "Sweep the waste paper into the dustpan", + "subtask": "Grasp the tissue with the left gripper", "subtask_index": 6 }, { @@ -75134,8 +85606,7 @@ "atomic_actions": [ "grasp", "pick", - "place", - "push" + "place" ], "robot_name": [ "Airbot_MMK2" @@ -75169,23 +85640,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 43106, + "total_episodes": 48, + "total_frames": 10048, "fps": 30, "total_tasks": 8, - "total_videos": 392, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "1.75 GB" + "dataset_size": "350.34 MB" }, - "frame_num": 43106, - "dataset_size": "1.75 GB", - "data_structure": "Airbot_MMK2_sweep_peaper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 10048, + "dataset_size": "350.34 MB", + "data_structure": "Airbot_MMK2_storage_milk_tissue_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:97" + "train": "0:47" }, "features": { "observation.images.cam_head_rgb": { @@ -75539,20 +86010,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Split_aloha_basket_storage_orange": { - "path": "Split_aloha_basket_storage_orange", - "dataset_name": "basket_storage_orange", + "R1_Lite_move_the_position_of_the_coffee_capsule": { + "path": "R1_Lite_move_the_position_of_the_coffee_capsule", + "dataset_name": "move_the_position_of_the_coffee_capsule", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", "place", - "pick" + "pick", + "grasp" ], - "tasks": "Place the basket in the center of view with left gripper", + "tasks": "End", "objects": [ { "object_name": "table", @@ -75563,598 +86034,410 @@ "level5": null }, { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", - "level1": "container", - "level2": "basket", + "object_name": "banana", + "level1": "fruit", + "level2": "banana", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-293847", - "dataset_size": "6.1GB", - "statistics": { - "total_episodes": 642, - "total_frames": 293847, - "total_tasks": 1, - "total_videos": 1926, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "f478bc5f-0850-4241-9c9b-cea1674f8e67", - "language": [ - 
"en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the basket in the center of view with left gripper", - "Place the orange in the basket with right gripper", - "Grasp the orange with right gripper", - "Grasp the basket with left gripper", - "End", - "Abnormal", - "Static", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, 
Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Split_aloha_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_basket_storage_orange_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_swap_bbs_block_plate": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ + }, { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", + "level3": null, + "level4": null, + "level5": null }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_bbs_block_plate", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "bb_pellets", - "level1": "toys", - "level2": "bb_pellets", + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { "object_name": "plate", - "level1": "kitchen_supplies", + "level1": "container", "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "block_pillar", - "level1": "toys", - "level2": "block_pillar", + "object_name": "can", + "level1": "container", + "level2": "can", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "take the bb balls out of the plate with left hand and put them back in with right hand." 
- ], - "sub_tasks": [ + }, { - "subtask": "Place the green rectangular block on the plate with the right gripper", - "subtask_index": 0 + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the bullet on the table with the left gripper", - "subtask_index": 1 + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Graasp the green rectangular block with the right gripper", - "subtask_index": 2 + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Abnormal", - "subtask_index": 3 + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Graasp the bullet on the plate and with the left gripper", - "subtask_index": 4 + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "End", - "subtask_index": 5 + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "null", - "subtask_index": 6 - } - ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 50, - "total_frames": 9449, - "fps": 30, - "total_tasks": 7, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "295.93 MB" - }, - "frame_num": 9449, - "dataset_size": "295.93 MB", - "data_structure": "Airbot_MMK2_swap_bbs_block_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:49" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - 
"info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "towel", + "level1": "clothing", + "level2": "towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - 
"left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null + { + "object_name": "power_strip", + "level1": 
"electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" + { + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", 
- "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" + { + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + 
"level2": "spoon", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-14334", + "dataset_size": "500.1MB", + "statistics": { + "total_episodes": 58, + "total_frames": 14334, + "total_tasks": 1, + "total_videos": 232, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "b0f7658f-e170-4911-afdf-9af621bfa0fe", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the coffee capsule with left gripper", + "Place the coffee capsule on the table with right gripper", + "Grasp the coffee capsule with right gripper", + "Static", + "Place the coffee capsule on the table with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data 
Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, 
Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_move_the_position_of_the_coffee_capsule_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n 
│ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_coffee_capsule_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storge_cake_ice_cream": { + "Galaxea_R1_Lite_pour_liquid_mrable_bar_counter": { "task_categories": [ "robotics" ], @@ -76184,10 +86467,10 @@ } }, "codebase_version": "v2.1", - "dataset_name": 
"Airbot_MMK2_storge_cake_ice_cream", + "dataset_name": "Galaxea_R1_Lite_pour_liquid_mrable_bar_counter", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", + "level1": "househhold", "level2": "kitchen", "level3": null, "level4": null, @@ -76196,33 +86479,41 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cake", - "level1": "food", - "level2": "cake", + "object_name": "marble_bar_counter", + "level1": "furniture", + "level2": "marble_bar_counter", "level3": null, "level4": null, "level5": null }, { - "object_name": "ice_cream", - "level1": "food", - "level2": "ice_cream", + "object_name": "plastic_cup", + "level1": "cups", + "level2": "plastic_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "green_dish", + "level1": "plates", + "level3": "green_dish", + "level2": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_bowl", + "level1": "plastic_bowls", + "level2": "pink_bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "kitchen_supplies", - "level2": "bowl", + "object_name": "liquid", + "level1": "materials", + "level2": "liquid", "level3": null, "level4": null, "level5": null @@ -76230,63 +86521,172 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "hold the cake in left hand and the ice cream in right hand at the same time. put the cake into the bowl and place the ice cream on the plate." + "use a gripper to pick up the cup and pour the liquid into a bowl or tray." 
], "sub_tasks": [ { - "subtask": "Place the ice cream into the plate with the right gripper", + "subtask": "Pour the orange juice into the pink bowl with left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the ice cream with the right gripper", + "subtask": "Pour the black tea into the pink bowl with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the cake with the left gripper", + "subtask": "Grasp the glass of black tea with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the cake into the bowl with the left gripper", + "subtask": "Pour the orange juice into the blue basin with right gripper", "subtask_index": 3 }, { - "subtask": "Static", + "subtask": "Left gripper", "subtask_index": 4 }, { - "subtask": "Grasp the cake from the table and with the left gripper", + "subtask": "Pour the tea into the pink bowl with left gripper", "subtask_index": 5 }, { - "subtask": "End", + "subtask": "Grasp the glass of orange juice with the right gripper", "subtask_index": 6 }, { - "subtask": "null", + "subtask": "Pour the tea into the pink bowl with right gripper", "subtask_index": 7 + }, + { + "subtask": "Pick up blue cup filled with tea with right gripper", + "subtask_index": 8 + }, + { + "subtask": "Pour the orange juice into the pink bowl with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Pour the orange juice into the pink bowl with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Pour the orange juice into the blue basin with left gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour the orange juice into the green bowl with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Place blue cup with tea on the table with right gripper", + "subtask_index": 13 + }, + { + "subtask": "Pour the orange juice into the green bowl with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Pour the orange juice into the pink bowl with right gripper", + "subtask_index": 15 + }, + { + 
"subtask": "Place blue cup with orange juice on the table with right gripper", + "subtask_index": 16 + }, + { + "subtask": "Pour the black tea into the green bowl with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Pick up blue cup filled with orange juice with right gripper", + "subtask_index": 18 + }, + { + "subtask": "Place blue cup with orange juice on the table with left gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the glass of black tea with the left gripper", + "subtask_index": 20 + }, + { + "subtask": "Pour the black tea into the green bowl with the left gripper", + "subtask_index": 21 + }, + { + "subtask": "Pour the tea into the blue basin with right gripper", + "subtask_index": 22 + }, + { + "subtask": "Pick up blue cup filled with tea with left gripper", + "subtask_index": 23 + }, + { + "subtask": "Pour the tea into the blue basin with left gripper", + "subtask_index": 24 + }, + { + "subtask": "Pour the black tea into the pink bowl with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the glass of orange juice with the left gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the glass cup with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Pick up blue cup filled with orange juice with left gripper", + "subtask_index": 28 + }, + { + "subtask": "End", + "subtask_index": 29 + }, + { + "subtask": "Place blue cup with shrimp on the table with right gripper", + "subtask_index": 30 + }, + { + "subtask": "Right gripper", + "subtask_index": 31 + }, + { + "subtask": "Place blue cup with tea on the table with left gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the glass cup with the left gripper", + "subtask_index": 33 + }, + { + "subtask": "null", + "subtask_index": 34 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": 
"two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -76303,30 +86703,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 8733, + "total_episodes": 100, + "total_frames": 43652, "fps": 30, - "total_tasks": 8, - "total_videos": 192, + "total_tasks": 35, + "total_videos": 400, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "351.41 MB" + "dataset_size": "1.70 GB" }, - "frame_num": 8733, - "dataset_size": "351.41 MB", - "data_structure": "Airbot_MMK2_storge_cake_ice_cream_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 43652, + "dataset_size": "1.70 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_liquid_mrable_bar_counter_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:47" + "train": "0:99" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -76335,8 +86735,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -76345,11 +86745,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -76358,8 +86758,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -76368,10 +86768,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -76381,7 +86781,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -76391,10 +86791,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -76404,7 +86804,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -76417,7 +86817,7 @@ "observation.state": { "dtype": 
"float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -76432,36 +86832,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -76476,30 +86854,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -76650,6 +87006,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + 
"dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -76671,22 +87087,22 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "R1_Lite_build_blocks": { - "path": "R1_Lite_build_blocks", - "dataset_name": "build_blocks", + "AIRBOT_MMK2_push_piston": { + "path": "AIRBOT_MMK2_push_piston", + "dataset_name": "push_piston", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "place", - "pick" + "pick", + "place" ], - "tasks": "Place the arch-shaped block in the center of view with the left gripper", + "tasks": "Static", "objects": [ { "object_name": "table", @@ -76697,27 +87113,27 @@ "level5": null }, { - "object_name": "blocks", - "level1": "toy", - "level2": "blocks", + "object_name": "syringe", + "level1": "medical_supplies", + "level2": "syringe", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-59296", - 
"dataset_size": "1.8GB", + "frame_range": "0-15278", + "dataset_size": "598.3MB", "statistics": { - "total_episodes": 66, - "total_frames": 59296, + "total_episodes": 47, + "total_frames": 15278, "total_tasks": 1, - "total_videos": 264, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "a56eff90-9eaa-4804-91c6-27e733d07c46", + "dataset_uuid": "eef01bf4-6600-4121-9501-b7651d8e26c5", "language": [ "en", "zh" @@ -76726,110 +87142,13 @@ "robotics" ], "sub_tasks": [ - "Place the arch-shaped block in the center of view with the left gripper", - "Grasp the arch-shaped block with the left gripper", - "Place the triangle-shaped block onto the box-shaped block with the left grippe", - "Grasp the box-shaped block with the right gripper", - "Place the box-shaped block onto the arch-shaped block with the right gripper", "Static", + "Push the piston with the right gripper", "End", - "Grasp the triangle-shaped block with the left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, 
Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_build_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_build_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_pick_empty_bottle_ab": { - "path": "G1edu-u3_pick_empty_bottle_ab", - "dataset_name": "pick_empty_bottle_ab", - "robot_type": "", - 
"end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "empty_bottle", - "level1": "mineral_water", - "level2": "empty_bottle", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-3290", - "dataset_size": "45.4MB", - "statistics": { - "total_episodes": 11, - "total_frames": 3290, - "total_tasks": 1, - "total_videos": 11, - "total_chunks": 1, - "chunks_size": 11, - "fps": 30 - }, - "dataset_uuid": "e8eb32c9-9e05-4189-9718-badce6a3ac0b", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Grasp the cola bottle and lift it to the center of the view with right gripper ", - "Grasp the cola bottle and lift it to the center of the view with left gripper  ", - "Abnormal", + "Grasp the syringe with the left gripper", + "Grasp the piston with the right gripper", + "Place the syringe on the table with the left gripper", + "Lift the syringe with the left gripper", "null" ], "annotations": { @@ -76867,12 +87186,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, 
Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pick_empty_bottle_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_empty_bottle_ab_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── 
observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_push_piston_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_push_piston_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── 
meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_open_and_close_microwave_oven": { - "path": "R1_Lite_open_and_close_microwave_oven", - "dataset_name": "open_and_close_microwave_oven", + "R1_Lite_clean_the_sink": { + "path": "R1_Lite_clean_the_sink", + "dataset_name": "clean_the_sink", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -76882,50 +87201,50 @@ "grasp", "pick", "place", - "push", - "pull", - "pressbutton" + "open", + "wipe", + "close" ], - "tasks": "Put the microwave back in its original place", + "tasks": "Pick up the cloth", "objects": [ { - "object_name": "french_fries", - "level1": "food", - "level2": "french_fries", + "object_name": "washbasin", + "level1": "container", + "level2": "washbasin", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "faucet", + "level1": "tool", + "level2": "faucet", "level3": null, "level4": null, "level5": null }, { - "object_name": "microwave", - "level1": "electric_appliance", - "level2": "microwave", + "object_name": "rag", + "level1": "clothing", + "level2": "rag", "level3": null, 
"level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-183612", - "dataset_size": "7.7GB", + "frame_range": "0-104391", + "dataset_size": "3.8GB", "statistics": { - "total_episodes": 117, - "total_frames": 183612, + "total_episodes": 120, + "total_frames": 104391, "total_tasks": 1, - "total_videos": 351, + "total_videos": 360, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "1b797b37-4aab-43be-9f02-cfcf030be8ff", + "dataset_uuid": "d29e439c-d1a9-41e5-ab2b-e264195dda27", "language": [ "en", "zh" @@ -76934,11 +87253,10 @@ "robotics" ], "sub_tasks": [ - "Put the microwave back in its original place", - "Take the plate out of the microwave", - "Close the microwave oven", - "Open the microwave oven", - "Put the plate in the microwave oven", + "Pick up the cloth", + "Turn on the faucet", + "Wipe the sink", + "Turn off the faucet", "null" ], "annotations": { @@ -76976,10 +87294,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, 
Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_open_and_close_microwave_oven_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_open_and_close_microwave_oven_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_slide_block_onto_post": { + "Airbot_MMK2_move_block": { "task_categories": [ "robotics" ], @@ -77009,7 +87327,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_slide_block_onto_post", + "dataset_name": "Airbot_MMK2_move_block", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -77020,6 +87338,14 @@ }, "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, { "object_name": "early_education_toys", "level1": "toys", @@ -77031,36 +87357,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the toy back to its original position with right hand." + "put the square blocks into the circular toy." 
], "sub_tasks": [ { - "subtask": "Grasp the red cylindrical build blocks with the right gripper", + "subtask": "Place the yellow block in the blue circle with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the yellow block with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the green cylindrical build blocks on the 1st pillar of the block base with the right gripper", + "subtask": "Grasp the yellow block with the rightt gripper", "subtask_index": 2 }, { - "subtask": "Place the red cylindrical build blocks on the 1st pillar of the block base with the right gripper", + "subtask": "Place the yellow block in the blue circle with the left gripper", "subtask_index": 3 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 4 }, - { - "subtask": "Grasp the green cylindrical build blocks with the right gripper", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 5 } ], "atomic_actions": [ @@ -77100,23 +87422,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 189, - "total_frames": 36378, + "total_episodes": 50, + "total_frames": 7264, "fps": 30, - "total_tasks": 7, - "total_videos": 756, + "total_tasks": 6, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "1.31 GB" + "dataset_size": "300.47 MB" }, - "frame_num": 36378, - "dataset_size": "1.31 GB", - "data_structure": "Airbot_MMK2_slide_block_onto_post_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- 
episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (177 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 7264, + "dataset_size": "300.47 MB", + "data_structure": "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:188" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -77470,9 +87792,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_pull_zipper": { - "path": "Cobot_Magic_pull_zipper", - "dataset_name": "pull_zipper", + "Tianqin_A2_box_storage_part": { + "path": "Tianqin_A2_box_storage_part", + "dataset_name": "box_storage_part", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -77480,9 +87802,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pull" + "pick", + "place" ], - "tasks": "Pull the zipper with right gripper", + "tasks": "Place the data cable in the another box", "objects": [ { "object_name": "table", @@ -77492,6 +87815,152 @@ "level4": null, "level5": null }, + { + "object_name": "part", + "level1": "tool", + "level2": "part", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-681531", + "dataset_size": "25.6GB", + "statistics": { + "total_episodes": 1104, + "total_frames": 681531, + "total_tasks": 1, + "total_videos": 3312, + "total_chunks": 2, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "7a1de37c-d8b0-476a-b503-99a0fd93cafd", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the data cable in the another box", + "Abnormal", + "Grasp the data cable", + "End", + "null" + ], + 
"annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n 
journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Tianqin_A2_box_storage_part_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── 
episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", + "structure": "Tianqin_A2_box_storage_part_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── 
episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + }, + "Cobot_Magic_food_packaging": { + "path": "Cobot_Magic_food_packaging", + "dataset_name": "food_packaging", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pull", + "place", + "pick" + ], + "tasks": "Place the banana into the package", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lunch_box", + "level1": "container", + "level2": "lunch_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pear", + "level1": "fruit", + "level2": "pear", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cucumber", + "level1": "vegetable", + "level2": "cucumber", + "level3": null, + "level4": null, + "level5": null + }, { "object_name": "bag", "level1": "container", @@ -77499,21 +87968,29 @@ "level3": null, "level4": null, "level5": null + }, + { + "object_name": "lemon", + "level1": "fruit", + "level2": "lemon", + "level3": null, + "level4": null, + "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-836317", - "dataset_size": "11.0GB", + "frame_range": "0-1470087", + "dataset_size": "25.8GB", "statistics": { - "total_episodes": 1868, - "total_frames": 836317, - "total_tasks": 6, - "total_videos": 5604, - "total_chunks": 2, + "total_episodes": 798, + "total_frames": 1470087, + "total_tasks": 2, + "total_videos": 2394, + "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": 
"3ede76ea-63fb-4de1-a796-a3809b4e09fa", + "dataset_uuid": "d6b80e23-6bac-471f-a759-a475112b145d", "language": [ "en", "zh" @@ -77522,19 +87999,54 @@ "robotics" ], "sub_tasks": [ - "Pull the zipper with right gripper", - "Pull open the zipper with your left hand.", - "Grasp the bag with left gripper", - "Hold the bag with your right hand.", - "End", - "Zip up the bag with your left hand.", - "Zip up the bag with your right hand.", - "Grasp the bag with right gripper", - "Pull the zipper with left gripper", - "Grab the zipper with your left hand.", + "Place the banana into the package", + "Right hand grabs cucumber.", + "Right hand moves lunch bag.", + "Right hand grabs lunch bag.", "Discard.", - "Pull open the zipper with your right hand.", - "Hold the bag with your left hand.", + "end", + "Right hand grabs banana.", + "Grasp the box with the right gripper", + "Grasp the package with the left gripper", + "Left hand grabs lunch box.", + "Hold the package and Pick up the bread", + "Left hand secures lunch bag.", + "Abnormal", + "Place the lemon into the package with the right gripper", + "Grasp the banana with the right gripper", + "Right hand grabs lunch box.", + "Left hand stands lunch bag upright.", + "Place the pear into the package with the right gripper", + "Place the pear into the package", + "Grasp the cucumber with the right gripper", + "Place the box into the package", + "Hold the package and Pick up the box", + "Hold the package and Pick up the cucumber", + "Right hand stands lunch bag upright.", + "Pick up the banana", + "Right hand grabs lemon.", + "Place the banana into the package with the right gripper", + "Place the bread into the package", + "Right hand places into bag.", + "Left hand moves lunch bag.", + "Grasp the lemon with the right gripper", + "Right hand grabs bread.", + "Pick up the pear", + "zip up the zipper to close the bag", + "Pick up the box", + "Right hand grabs pear.", + "Right hand pulls zipper.", + "Pick up the bread", + "Grasp the 
pear with the right gripper", + "Pick up the cucumber", + "Hold the package and Pick up the pear", + "Place the box into the package with the right gripper", + "Place the cucumber into the package with the right gripper", + "Right hand receives lunch box.", + "Hold the package and Pick up the banana", + "Left hand grabs lunch bag.", + "Left hand lifts lunch box.", + "Place the cucumber into the package", "null" ], "annotations": { @@ -77572,10 +88084,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_pull_zipper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", - "structure": "Cobot_Magic_pull_zipper_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n 
├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_swap_bread_cake_plate": { + "Airbot_MMK2_unscrew_bottle_cap": { "task_categories": [ "robotics" ], @@ -77605,11 +88117,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_bread_cake_plate", + "dataset_name": "Airbot_MMK2_unscrew_bottle_cap", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -77617,25 +88129,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "bread", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "beverages", + "level1": "beverages", + "level2": "beverages", "level3": null, "level4": null, "level5": null @@ -77643,42 +88139,39 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the cake out of the plate with left hand and put the bread in with right hand." + "pick up the bottle with left hand and unscrew the cap with right hand." 
], "sub_tasks": [ { - "subtask": "Grasp the bread with the right gripper", + "subtask": "Grasp the bottle with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the cake on the table with the left gripper", + "subtask": "Place the bottle on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Lift the bottle up with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the bread into the plate with the right gripper", + "subtask": "Unscrew the bottle cap with the right hand while holding the bottle with the left hand", "subtask_index": 3 }, { "subtask": "End", "subtask_index": 4 }, - { - "subtask": "Grasp the cake on the plate with the left gripper", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "turn" ], "robot_name": [ "Airbot_MMK2" @@ -77712,23 +88205,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 41, - "total_frames": 11322, + "total_episodes": 50, + "total_frames": 16965, "fps": 30, - "total_tasks": 7, - "total_videos": 164, + "total_tasks": 6, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, - "camera_views": 4, - "dataset_size": "475.32 MB" - }, - "frame_num": 11322, - "dataset_size": "475.32 MB", - "data_structure": "Airbot_MMK2_swap_bread_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- 
episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "camera_views": 4, + "dataset_size": "632.68 MB" + }, + "frame_num": 16965, + "dataset_size": "632.68 MB", + "data_structure": "Airbot_MMK2_unscrew_bottle_cap_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:40" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -78082,7 +88575,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_potato_pumpkin": { + "agilex_cobot_magic_pass_object_right_to_left_white_tablecloth": { "task_categories": [ "robotics" ], @@ -78112,11 +88605,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_potato_pumpkin", + "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_white_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -78124,25 +88617,145 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "potato", - "level1": "vegetables", - "level2": "potato", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "pumpkin", - "level1": "vegetables", - "level2": "pumpkin", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + 
"level5": null + }, + { + "object_name": "white_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", "level3": null, "level4": null, "level5": null @@ -78150,61 +88763,205 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the potato with left hand and put it in the storage box, and pick up the pumpkin with right hand and put it in the storage box." + "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." ], "sub_tasks": [ { - "subtask": "Grasp the pumpkin with the right gripper", + "subtask": "The left gripper places milk on the left side of the table", "subtask_index": 0 }, { - "subtask": "Grasp the potato with the left gripper", + "subtask": "\nPass the square chewing gun to the left gripper\n", "subtask_index": 1 }, { - "subtask": "Place the potato into the left compartment of the storage box with the left gripper", + "subtask": "The left gripper places Rubik's Cube on the left side of the table", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "\nPass the yogurt to the left gripper\n", "subtask_index": 3 }, { - "subtask": "Place the pumpkin into the right compartment of the storage box with the right gripper", + "subtask": "The left gripper places grey towel on the left side of the table", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 5 + }, + { + "subtask": "Use the right gripper to grab the banana on the right side of the table", + "subtask_index": 6 + }, + { + "subtask": "The left gripper places bananal on the left side of the table", + "subtask_index": 7 + }, + { + "subtask": "\nPass the shower spherer to the left gripper\n", + "subtask_index": 8 + }, + { + "subtask": "The left gripper places yogurt on the left side of the table", + "subtask_index": 9 + }, + { + "subtask": "\nPass the eggplant to the left gripper\n", + 
"subtask_index": 10 + }, + { + "subtask": "\nPass the eyeglass case to the left gripper\n", + "subtask_index": 11 + }, + { + "subtask": "Use the right gripper to grab the banana on the right side of the table", + "subtask_index": 12 + }, + { + "subtask": "The left gripper places eggplant on the left side of the table", + "subtask_index": 13 + }, + { + "subtask": "\nPass the eggplant to the left gripper\n", + "subtask_index": 14 + }, + { + "subtask": "Use the right gripper to grab the milk on the right side of the table", + "subtask_index": 15 + }, + { + "subtask": "The left gripper places banana on the left side of the table", + "subtask_index": 16 + }, + { + "subtask": "\nPass the Rubik's Cube to the left gripper\n", + "subtask_index": 17 + }, + { + "subtask": "Use the right gripper to grab the eggplant on the right side of the table", + "subtask_index": 18 + }, + { + "subtask": "Use the right gripper to grab the square chewing gun on the right side of the table", + "subtask_index": 19 + }, + { + "subtask": "Use the right gripper to grab the banana the on right side of the table", + "subtask_index": 20 + }, + { + "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", + "subtask_index": 21 + }, + { + "subtask": "Use the right gripper to grab the bananal on the right side of the table", + "subtask_index": 22 + }, + { + "subtask": "The left gripper places shower sphere on the left side of the table", + "subtask_index": 23 + }, + { + "subtask": "\nPass the grey towel to the left gripper\n", + "subtask_index": 24 + }, + { + "subtask": "Use the right gripper to grab the blue blackboard erasure on the right side of the table", + "subtask_index": 25 + }, + { + "subtask": "\nPass the banana to the left gripper\n", + "subtask_index": 26 + }, + { + "subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", + "subtask_index": 27 + }, + { + "subtask": "\nPass the purple garbage bag to the left 
gripper\n", + "subtask_index": 28 + }, + { + "subtask": "The left gripper places blue blackboard erasure on the left side of the table", + "subtask_index": 29 + }, + { + "subtask": "Use the right gripper to grab the eggplant on the right side of the table", + "subtask_index": 30 + }, + { + "subtask": "Use the right gripper to grab the square chewing gum on the right side of the table", + "subtask_index": 31 + }, + { + "subtask": "The left gripper places square chewing gun on the left side of the table", + "subtask_index": 32 + }, + { + "subtask": "\nPass the shower sphere to the left gripper\n", + "subtask_index": 33 + }, + { + "subtask": "The left gripper places eyeglass case on the left side of the table", + "subtask_index": 34 + }, + { + "subtask": "Use the right gripper to grab the yogurt on the right side of the table", + "subtask_index": 35 + }, + { + "subtask": "\nPass the blue blackboard erasure to the left gripper\n", + "subtask_index": 36 + }, + { + "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", + "subtask_index": 37 + }, + { + "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", + "subtask_index": 38 + }, + { + "subtask": "\nPass the milk to the left gripper\n", + "subtask_index": 39 + }, + { + "subtask": "The left gripper places purple garbage bag on the left side of the table", + "subtask_index": 40 + }, + { + "subtask": "null", + "subtask_index": 41 } ], "atomic_actions": [ - "pick", - "place", - "grasp" + "grasp", + "lift", + "lower", + "handover", + "takeover" ], "robot_name": [ - "Airbot_MMK2" + "agilex_cobot_magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { 
"cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -78215,23 +88972,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 41, - "total_frames": 9965, + "total_episodes": 99, + "total_frames": 52998, "fps": 30, - "total_tasks": 6, - "total_videos": 164, + "total_tasks": 42, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "288.93 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "508.79 MB" }, - "frame_num": 9965, - "dataset_size": "288.93 MB", - "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_potato_pumpkin_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- 
episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 52998, + "dataset_size": "508.79 MB", + "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_white_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:40" + "train": "0:98" }, "features": { "observation.images.cam_head_rgb": { @@ -78303,33 +89060,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -78338,42 +89072,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -78382,36 +89106,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -78451,17 +89165,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -78478,10 +89192,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -78498,70 +89212,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -78583,23 +89357,22 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "G1edu-u3_pullBowl_storage_bread_unordered_b": { - "path": "G1edu-u3_pullBowl_storage_bread_unordered_b", - "dataset_name": "pullBowl_storage_bread_unordered_b", + "Split_aloha_zip_up_the_document_bag": { + "path": "Split_aloha_zip_up_the_document_bag", + "dataset_name": "zip_up_the_document_bag", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place", - "receive" + "pull", + "place" ], - "tasks": "Abnormal", + "tasks": "Pulling up the zipper on the transparent plastic bag with the left gripper to close it", "objects": [ { "object_name": "table", @@ -78610,59 +89383,27 @@ "level5": null }, { - "object_name": "faucet", - "level1": "water_dispenser", - "level2": "faucet", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cup", - "level1": "kitchen_supplies", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange_juice", - "level1": "drink", - "level2": "orange_juice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "beverages", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sprite", - "level1": "beverages", - "level2": "sprite", + "object_name": "document_bag", + "level1": "office_supplies", + "level2": "document_bag", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-118843", - "dataset_size": "2.3GB", + "frame_range": "0-234815", + "dataset_size": "2.7GB", "statistics": { - "total_episodes": 190, - "total_frames": 
118843, - "total_tasks": 1, - "total_videos": 570, + "total_episodes": 495, + "total_frames": 234815, + "total_tasks": 4, + "total_videos": 1485, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a50", + "dataset_uuid": "e06c5784-df23-4a4a-b86d-adc2bcf12bae", "language": [ "en", "zh" @@ -78671,13 +89412,13 @@ "robotics" ], "sub_tasks": [ - "Abnormal", - "Place the long bread in pink bowl with left hand", + "Pulling up the zipper on the transparent plastic bag with the left gripper to close it", + "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", + "Static", "End", - "Grasp the round bread with left hand", - "Grasp the long bread with left hand", - "Place the round bread in pink bowl with left hand", - "Move the pink bowl to the center of table with right hand", + "Pick up the transparent plastic bag with the left gripper", + "Pick up the transparent plastic bag with the right gripper", + "Place the transparent plastic bag", "null" ], "annotations": { @@ -78715,12 +89456,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin 
Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pullBowl_storage_bread_unordered_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pullBowl_storage_bread_unordered_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ 
├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Split_aloha_zip_up_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"Split_aloha_zip_up_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_place_the_cake": { - "path": "AIRBOT_MMK2_place_the_cake", - "dataset_name": "place_the_cake", + "AIRBOT_MMK2_store_wet_wipes_and_bowls": { + "path": "AIRBOT_MMK2_store_wet_wipes_and_bowls", + "dataset_name": "store_wet_wipes_and_bowls", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -78728,10 +89469,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "open", + "hold" ], - "tasks": "Place the cake on the yellow cube block with the left gripper", + "tasks": "Place the bowl on the plate with the right gripper", "objects": [ { "object_name": "table", @@ -78742,419 +89483,158 @@ "level5": null }, { - "object_name": "cake", - "level1": "food", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", + "object_name": "bowl", 
"level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "building_blocks", - "level1": "toy", - "level2": "building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ice_cream", - "level1": "food", - "level2": "ice_cream", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "small_basin", - "level1": "container", - "level2": "small_basin", + "object_name": "wet_tissue", + "level1": "daily_necessities", + "level2": "wet_tissue", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", + "object_name": "square_plate", "level1": "container", - "level2": "lid", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "swiss_roll", - "level1": "food", - "level2": "swiss_roll", + "level2": "square_plate", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-50001", - "dataset_size": "2.3GB", + "frame_range": "0-13058", + "dataset_size": "425.0MB", "statistics": { - "total_episodes": 249, - "total_frames": 50001, - "total_tasks": 5, - "total_videos": 996, + "total_episodes": 50, + "total_frames": 13058, + "total_tasks": 1, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "9dd9223d-2d05-47a8-bfd0-0884a7f571ed", + "dataset_uuid": "de8bf908-6d6b-42fb-9298-29fa21a3fc8d", "language": [ "en", "zh" ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the cake on the yellow cube block with the left gripper", - "Grasp the bagged cake with the left gripper", - "Grasp the cake on the plate with the right gripper", - "Place the cake on the table with the right gripper", - "Abnormal", - "Static", - "Grasp the cake with the left gripper", - "Place the ice cream into the white basket with the right gripper", - "Place the cake into the white basket with the left gripper", - "Place the ice 
cream into the white basket with the left gripper", - "Grasp the ice cream with the right gripper", - "Grasp the ice cream with the left gripper", - "Grasp the bagged cake with the right gripper", - "Place the cake into the white plate with the left gripper", - "Place the bagged cake on the white plate with the left gripper", - "End", - "Grasp the cake with the right gripper", - "Place the bagged cake on the white plate with the right gripper", - "Place the cake on the blue cube block with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Agilex_Cobot_Magic_move_object_beige_tablecloth": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_object_beige_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial & convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "white_table_cloths", - "level1": "laboratory_supplies", - "level2": "white_table_cloths", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "waffle", - "level1": "food", - "level2": "waffle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_lemon", - "level1": "food", - "level2": "green_lemon", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mango", - "level1": "food", - "level2": "mango", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "food", - "level2": 
"chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mint_candy", - "level1": "food", - "level2": "mint_candy", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mangosteen", - "level1": "food", - "level2": "mangosteen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "food", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "food", - "level2": "banana", - "level3": null, - "level4": "Fruit cake", - "level5": null - }, - { - "object_name": "cake", - "level1": "food", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "beef_cheeseburger", - "level1": "food", - "level2": "beef_cheeseburger", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "kitchen_supplies", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pan", - "level1": "kitchen_supplies", - "level2": "pan", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "small_teapot", - "level1": "kitchen_supplies", - "level2": "small_teapot", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "small_teacup", - "level1": "kitchen_supplies", - "level2": "small_teacup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_ball", - "level1": "trash", - "level2": "paper_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brown_square_towel", - "level1": "daily_necessities", - "level2": "brown_square_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "black_cylindrical_pen_holder", - "level1": 
"stationery", - "level2": "black_cylindrical_pen_holder", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pink_long_towel", - "level1": "daily_necessities", - "level2": "pink_long_towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_necessities", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null - }, + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the bowl on the plate with the right gripper", + "Static", + "Grasp the wet wipes with the left gripper", + "Grasp the bowl with the right gripper", + "End", + "Place the wet wipes into the bowl with the left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng 
Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_store_wet_wipes_and_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_store_wet_wipes_and_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_take_part_both_hands": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ { 
- "object_name": "duck", - "level1": "toys", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_take_part_both_hands", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "object_name": "compass", - "level1": "stationery", - "level2": "compass", + "object_name": "item", + "level1": "item", + "level2": "item", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "kitchen_supplies", - "level2": "bowl", + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_long_towel", - "level1": "daily_necessities", - "level2": "blue_long_towel", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null @@ -79162,23 +89642,23 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the gripper move the object." 
+ "take the items off the building blocks with both hands and place them on the table." ], "sub_tasks": [ { - "subtask": "Grasp the XX with the right gripper", + "subtask": "Grasp the steel tube on the cube block with the right gripper", "subtask_index": 0 }, { - "subtask": "Place the XX on the table with the left gripper", + "subtask": "Grasp the steel tube on the cube block with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the XX on the table with the right gripper", + "subtask": "Place the steel tube on the table with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the XX with the left gripper", + "subtask": "Place the steel tube on the table with the right gripper", "subtask_index": 3 }, { @@ -79192,29 +89672,31 @@ ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": 
"end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -79225,23 +89707,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 199, - "total_frames": 103966, + "total_episodes": 50, + "total_frames": 6348, "fps": 30, "total_tasks": 6, - "total_videos": 597, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "1.34 GB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "239.11 MB" }, - "frame_num": 103966, - "dataset_size": "1.34 GB", - "data_structure": "Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(187 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 6348, + "dataset_size": "239.11 MB", + "data_structure": "Airbot_MMK2_take_part_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:198" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -79313,10 +89795,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -79325,32 +89830,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -79359,26 +89874,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -79418,17 +89943,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -79445,10 +89970,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + 
"dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -79465,130 +89990,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -79610,9 +90075,341 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_take_block_both_hands": { + "Galbot_g1_steamer_storage_baozi_g": { + "path": "Galbot_g1_steamer_storage_baozi_g", + "dataset_name": "steamer_storage_baozi_g", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the pot lid with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "baozi", + "level1": "food", + "level2": "baozi", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "steamer", + "level1": "cookware", + "level2": "steamer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pot_lid", + "level1": "daily_necessities", + "level2": "pot_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-87860", + "dataset_size": "1.6GB", + "statistics": { + "total_episodes": 89, + "total_frames": 87860, + "total_tasks": 1, + "total_videos": 267, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "fc0c05a2-5b0d-4aa4-9329-eb16047d2f03", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the pot lid with left gripper", + "Place the pot lid on the steamer with left gripper", + "End", + "Place the baozi on the steamer with right gripper", + 
"Grasp the baozi in the plate with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, 
Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Galbot_g1_steamer_storage_baozi_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_steamer_storage_baozi_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_open_and_close_curtains": { + "path": "R1_Lite_open_and_close_curtains", + "dataset_name": "open_and_close_curtains", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Pull the outeouter curtains open on both sides with the both gripper", + "objects": [ + { + "object_name": "curtains", + "level1": "clothing", + "level2": "curtains", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-226694", + "dataset_size": "8.5GB", + "statistics": { + "total_episodes": 98, + "total_frames": 226694, + "total_tasks": 1, + "total_videos": 294, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "184bad58-5213-4be2-bb93-7c6d271bf034", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pull the outeouter curtains open on both sides with the both gripper", + "Pull the outer curtains open on both sides", + "End", + "Pull the inner curtains open on both sides", + "Close the outer curtains on both sides", + "Pull the inner curtains open on left sides with the left gripper", + "Close the right inner curtain with the right gripper", + "Pull the inner curtains open on right sides with the right gripper", + "Close the left inner curtain with the left gripper", + "Close the 
outeouter curtains on both sides with the both gripper", + "Close the inner curtains on both sides", + "abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, 
He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_open_and_close_curtains_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_open_and_close_curtains_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_moving_parts_c": { + "path": "leju_robot_moving_parts_c", + "dataset_name": "moving_parts_c", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the white part on the table with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-716756", + "dataset_size": "44.0GB", + "statistics": { + "total_episodes": 490, + "total_frames": 716756, + "total_tasks": 1, + "total_videos": 1470, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "7aa945d0-afcc-41a9-bc85-6d19914ec42d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the white part on the table with left gripper", + "Grasp the white part with left gripper", + "Place the gray part on the table with right gripper", + "Grasp the gray part with right gripper", + 
"End", + "Return to the initial position at the workbench.", + "Move the small component to the workbench.", + "Place the white part on the table with right gripper", + "Insert the small component into the corresponding slot on the workbench.", + "Pick up the small component from the shelf.", + "Move to the table behind body", + "Grasp the white part with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun 
Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_close_drawer_top": { "task_categories": [ "robotics" ], @@ -79642,11 +90439,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_block_both_hands", + "dataset_name": "Agilex_Cobot_Magic_close_drawer_top", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -79654,17 +90451,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "paper_boxes", - "level1": "home_storage", - "level2": "paper_boxes", + "object_name": "three_layer_transparent_drawer", + "level1": "laboratory_supplies", + "level2": "three_layer_transparent_drawer", "level3": null, "level4": null, "level5": null 
@@ -79672,7 +90469,7 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the building blocks simultaneously with both hands and take them out of the paper box." + "close the top drawer." ], "sub_tasks": [ { @@ -79680,57 +90477,42 @@ "subtask_index": 0 }, { - "subtask": "Grasp the blue cube block on the paper box with the left gripper", + "subtask": "Push the top drawer closed", "subtask_index": 1 }, { - "subtask": "Static", + "subtask": "Use the right gripper to touch the topmost layer of the storage cabinet", "subtask_index": 2 }, - { - "subtask": "Grasp the green cube block on the paper box with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Place the green cube block on the table with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Place the blue cube block on the table with the left gripper", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 3 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "push" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, 
pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -79741,23 +90523,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 4793, + "total_episodes": 50, + "total_frames": 9800, "fps": 30, - "total_tasks": 7, - "total_videos": 188, + "total_tasks": 4, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "221.66 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "96.54 MB" }, - "frame_num": 4793, - "dataset_size": "221.66 MB", - "data_structure": "Airbot_MMK2_take_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 9800, + "dataset_size": "96.54 MB", + "data_structure": "Agilex_Cobot_Magic_close_drawer_top_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:46" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -79829,33 +90611,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -79864,42 +90623,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -79908,36 +90657,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -79977,17 +90716,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -80004,10 +90743,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -80024,70 +90763,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -80109,23 +90908,22 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Split_aloha_pour_rice": { - "path": "Split_aloha_pour_rice", - "dataset_name": "pour_rice", + "AIRBOT_MMK2_mobile_phone_storage": { + "path": "AIRBOT_MMK2_mobile_phone_storage", + "dataset_name": "mobile_phone_storage", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ - "place", - "pick", "grasp", - "pour" + "pick", + "place" ], - "tasks": "Grasp the cup with rice in it with left gripper", + "tasks": "End", "objects": [ { "object_name": "table", @@ -80136,43 +90934,35 @@ "level5": null }, { - "object_name": "rice", - "level1": "food", - "level2": "rice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rice_container", - "level1": "container", - "level2": "rice_container", + "object_name": "mobile_phone", + "level1": "communication_supplies", + "level2": "mobile_phone", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup", + "object_name": "calculator_box", "level1": "container", - "level2": "cup", + "level2": "calculator_box", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-504182", - "dataset_size": "4.9GB", + "frame_range": "0-9492", + "dataset_size": "496.3MB", "statistics": { - "total_episodes": 740, - "total_frames": 504182, + "total_episodes": 49, + "total_frames": 9492, "total_tasks": 1, - "total_videos": 2220, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "004a37ce-a00a-4547-a5a0-0538af2c8a99", + "dataset_uuid": "ae74a19b-7e4c-4f10-b378-faa014a9f006", "language": [ "en", "zh" @@ -80181,17 +90971,13 @@ 
"robotics" ], "sub_tasks": [ - "Grasp the cup with rice in it with left gripper", - "Place the bowl in the center of view with the right gripper", "End", - "Place the cup on the table with the left gripper", + "Place the telephone on the table with the right gripper", "Static", - "Pick up the bowl with the right gripper", + "Place the telephone on the box with the left gripper", + "Grasp the telephone with the left gripper", + "Grasp the telephone with the right gripper", "Abnormal", - "Move the bowl in the center of view with right gripper", - "Move the bowl to the center of view with the right gripper", - "Pick up the cup with rice in it with the left gripper", - "Pour the rice from the cup into the bowl with the left gripper", "null" ], "annotations": { @@ -80229,10 +91015,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv 
preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Split_aloha_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_pour_rice_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_mobile_phone_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_mobile_phone_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_mix_blue_yellow_right": { + "Airbot_MMK2_take_bowl_sponge": { "task_categories": [ "robotics" ], @@ -80262,11 +91048,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_blue_yellow_right", + "dataset_name": "Airbot_MMK2_take_bowl_sponge", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -80274,49 +91060,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_pigment", - 
"level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_pigment", - "level1": "materials", - "level2": "yellow_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", + "object_name": "cleaning_sponge", + "level1": "cleaning", + "level2": "cleaning_sponge", "level3": null, "level4": null, "level5": null }, { - "object_name": "test_tubes", - "level1": "laboratory_supplies", - "level2": "test_tubes", + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null @@ -80324,100 +91086,59 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the test tube with yellow pigment and the test tube with blue pigment by grippers and pour them into the beaker." + "pick up the sponge and bowl by hand and place them on the table." 
], "sub_tasks": [ { - "subtask": "Abnormal", + "subtask": "Place the sponge on the table with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the test tube into the bowl with right gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "Grasp the red reagent with the left gripper", + "subtask": "Grasp the sponge on the white basket and with the left gripper", "subtask_index": 2 }, { - "subtask": "Pour the blue reagent into the graduated cylinder with right gripper", + "subtask": "Grasp the bowl on the white basket and with the right gripper", "subtask_index": 3 }, { - "subtask": "Pick up the test tube containing the yellow reagent with right gripper", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Grasp the yellow reagent with the right gripper", + "subtask": "Place the bowl on the table with the right gripper", "subtask_index": 5 }, - { - "subtask": "Grasp the red reagent with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Place the test tube into the bowl with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "End", - "subtask_index": 9 - }, - { - "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with right gripper", - "subtask_index": 11 - }, - { - "subtask": "Pick up the test tube containing the blue reagent with right gripper", - "subtask_index": 12 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "end", - "subtask_index": 14 - }, - { - "subtask": "Grasp the blue reagent with the right gripper", - "subtask_index": 15 - }, { "subtask": "null", - "subtask_index": 16 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", "pick", - "place", - "pour" + 
"place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -80434,30 +91155,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 32657, + "total_episodes": 42, + "total_frames": 11308, "fps": 30, - "total_tasks": 17, - "total_videos": 200, + "total_tasks": 7, + "total_videos": 168, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "665.69 MB" + "dataset_size": "373.70 MB" }, - "frame_num": 32657, - "dataset_size": "665.69 MB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_right_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 11308, + "dataset_size": "373.70 MB", + "data_structure": "Airbot_MMK2_take_bowl_sponge_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(30 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:41" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -80466,8 +91187,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -80476,11 +91197,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -80489,8 +91210,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -80499,10 +91220,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -80512,7 +91233,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -80522,10 +91243,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -80535,7 +91256,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -80548,7 +91269,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ 
"left_arm_joint_1_rad", @@ -80563,14 +91284,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -80585,8 +91328,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -80737,66 +91502,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -80818,436 +91523,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "R1_Lite_move_the_position_of_the_coffee_capsule": { - "path": "R1_Lite_move_the_position_of_the_coffee_capsule", - "dataset_name": "move_the_position_of_the_coffee_capsule", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": 
"container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - 
"level5": null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": 
"glass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-14334", - "dataset_size": "500.1MB", - "statistics": { - "total_episodes": 58, - "total_frames": 14334, - "total_tasks": 1, - "total_videos": 232, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "b0f7658f-e170-4911-afdf-9af621bfa0fe", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Grasp the coffee capsule with left gripper", - "Place the coffee capsule on the table with right gripper", - "Grasp the coffee capsule with right gripper", - "Static", - "Place the coffee capsule on the table with left 
gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang 
and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_coffee_capsule_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_coffee_capsule_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_pour_liquid_mrable_bar_counter": { + "Airbot_MMK2_rotate_cube_face": { "task_categories": [ "robotics" ], @@ -81277,11 +91555,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_pour_liquid_mrable_bar_counter", + "dataset_name": "Airbot_MMK2_rotate_cube_face", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "househhold", - "level2": "kitchen", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -81289,41 +91567,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "marble_bar_counter", - "level1": "furniture", - "level2": "marble_bar_counter", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_cup", - "level1": "cups", - "level2": "plastic_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - 
"object_name": "green_dish", - "level1": "plates", - "level3": "green_dish", - "level2": null, - "level4": null, - "level5": null - }, - { - "object_name": "pink_bowl", - "level1": "plastic_bowls", - "level2": "pink_bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "liquid", - "level1": "materials", - "level2": "liquid", + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", "level3": null, "level4": null, "level5": null @@ -81331,172 +91577,64 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick up the cup and pour the liquid into a bowl or tray." + "pick up the rubik's cube with left hand, rotate it once with right hand, and then put it down with left hand." ], "sub_tasks": [ { - "subtask": "Pour the orange juice into the pink bowl with left gripper", + "subtask": "Adjust the rubik's cube with the left gripper", "subtask_index": 0 }, { - "subtask": "Pour the black tea into the pink bowl with the right gripper", + "subtask": "Grasp the rubik's cube with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the glass of black tea with the right gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Pour the orange juice into the blue basin with right gripper", + "subtask": "Place the rubik's cube on the table with the left gripper", "subtask_index": 3 }, { - "subtask": "Left gripper", + "subtask": "Move the rightmost combination block of the rubik's cube with the right gripper", "subtask_index": 4 }, { - "subtask": "Pour the tea into the pink bowl with left gripper", + "subtask": "Abnormal", "subtask_index": 5 }, { - "subtask": "Grasp the glass of orange juice with the right gripper", + "subtask": "Lift up the rubik's cube with the left gripper", "subtask_index": 6 }, - { - "subtask": "Pour the tea into the pink bowl with right gripper", - "subtask_index": 7 - }, - 
{ - "subtask": "Pick up blue cup filled with tea with right gripper", - "subtask_index": 8 - }, - { - "subtask": "Pour the orange juice into the pink bowl with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Pour the orange juice into the pink bowl with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Pour the orange juice into the blue basin with left gripper", - "subtask_index": 11 - }, - { - "subtask": "Pour the orange juice into the green bowl with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Place blue cup with tea on the table with right gripper", - "subtask_index": 13 - }, - { - "subtask": "Pour the orange juice into the green bowl with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Pour the orange juice into the pink bowl with right gripper", - "subtask_index": 15 - }, - { - "subtask": "Place blue cup with orange juice on the table with right gripper", - "subtask_index": 16 - }, - { - "subtask": "Pour the black tea into the green bowl with the right gripper", - "subtask_index": 17 - }, - { - "subtask": "Pick up blue cup filled with orange juice with right gripper", - "subtask_index": 18 - }, - { - "subtask": "Place blue cup with orange juice on the table with left gripper", - "subtask_index": 19 - }, - { - "subtask": "Grasp the glass of black tea with the left gripper", - "subtask_index": 20 - }, - { - "subtask": "Pour the black tea into the green bowl with the left gripper", - "subtask_index": 21 - }, - { - "subtask": "Pour the tea into the blue basin with right gripper", - "subtask_index": 22 - }, - { - "subtask": "Pick up blue cup filled with tea with left gripper", - "subtask_index": 23 - }, - { - "subtask": "Pour the tea into the blue basin with left gripper", - "subtask_index": 24 - }, - { - "subtask": "Pour the black tea into the pink bowl with the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Grasp the glass of orange juice with the left gripper", - "subtask_index": 26 
- }, - { - "subtask": "Place the glass cup with the right gripper", - "subtask_index": 27 - }, - { - "subtask": "Pick up blue cup filled with orange juice with left gripper", - "subtask_index": 28 - }, - { - "subtask": "End", - "subtask_index": 29 - }, - { - "subtask": "Place blue cup with shrimp on the table with right gripper", - "subtask_index": 30 - }, - { - "subtask": "Right gripper", - "subtask_index": 31 - }, - { - "subtask": "Place blue cup with tea on the table with left gripper", - "subtask_index": 32 - }, - { - "subtask": "Place the glass cup with the left gripper", - "subtask_index": 33 - }, { "subtask": "null", - "subtask_index": 34 + "subtask_index": 7 } ], "atomic_actions": [ "grasp", "pick", "place", - "pour" + "filp" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, 
codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -81513,30 +91651,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 100, - "total_frames": 43652, + "total_episodes": 145, + "total_frames": 55057, "fps": 30, - "total_tasks": 35, - "total_videos": 400, + "total_tasks": 8, + "total_videos": 580, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "1.70 GB" + "dataset_size": "2.13 GB" }, - "frame_num": 43652, - "dataset_size": "1.70 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_liquid_mrable_bar_counter_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 55057, + "dataset_size": "2.13 GB", + "data_structure": "Airbot_MMK2_rotate_cube_face_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(133 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:99" + "train": "0:144" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -81545,8 +91683,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -81555,11 +91693,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -81568,8 +91706,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -81578,10 +91716,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -81591,7 +91729,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -81601,10 +91739,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -81614,7 +91752,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -81627,7 +91765,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ 
"left_arm_joint_1_rad", @@ -81642,14 +91780,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -81663,9 +91823,31 @@ "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -81816,66 +91998,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], 
- "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -81897,217 +92019,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "AIRBOT_MMK2_push_piston": { - "path": "AIRBOT_MMK2_push_piston", - "dataset_name": "push_piston", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Static", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "syringe", - "level1": "medical_supplies", - "level2": "syringe", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-15278", - "dataset_size": "598.3MB", - "statistics": { - "total_episodes": 47, - "total_frames": 15278, - "total_tasks": 1, - "total_videos": 188, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "eef01bf4-6600-4121-9501-b7651d8e26c5", - "language": [ - "en", - "zh" - ], - 
"task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Static", - "Push the piston with the right gripper", - "End", - "Grasp the syringe with the left gripper", - "Grasp the piston with the right gripper", - "Place the syringe on the table with the left gripper", - "Lift the syringe with the left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong 
Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_push_piston_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_push_piston_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ 
├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_clean_the_sink": { - "path": "R1_Lite_clean_the_sink", - "dataset_name": "clean_the_sink", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "open", - "wipe", - "close" - ], - "tasks": "Pick up the cloth", - "objects": [ - { - "object_name": "washbasin", - "level1": "container", - "level2": "washbasin", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "faucet", - "level1": "tool", - "level2": "faucet", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rag", - "level1": "clothing", - "level2": "rag", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-104391", - "dataset_size": "3.8GB", - 
"statistics": { - "total_episodes": 120, - "total_frames": 104391, - "total_tasks": 1, - "total_videos": 360, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "d29e439c-d1a9-41e5-ab2b-e264195dda27", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the cloth", - "Turn on the faucet", - "Wipe the sink", - "Turn off the faucet", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng 
Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── 
data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_move_block": { + "Airbot_MMK2_storage_tumbler_umbrella": { "task_categories": [ "robotics" ], @@ -82137,11 +92051,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_block", + "dataset_name": "Airbot_MMK2_storage_tumbler_umbrella", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "bedroom", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -82149,17 +92063,33 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "early_education_toys", - "level1": "toys", - "level2": "early_education_toys", + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", + "level3": null, 
+ "level4": null, + "level5": null + }, + { + "object_name": "umbrella", + "level1": "tool", + "level2": "umbrella", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tumbler", + "level1": "container", + "level2": "tumbler", "level3": null, "level4": null, "level5": null @@ -82167,23 +92097,23 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the square blocks into the circular toy." + "pick up the umbrella with left hand and put it in the basket, then use right hand to pick up the thermos and put it in the basket." ], "sub_tasks": [ { - "subtask": "Place the yellow block in the blue circle with the right gripper", + "subtask": "Place the cup into the white basket with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the yellow block with the left gripper", + "subtask": "Grasp the umbrella with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the yellow block with the rightt gripper", + "subtask": "Place the umbrella into the white basket with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the yellow block in the blue circle with the left gripper", + "subtask": "Static", "subtask_index": 3 }, { @@ -82191,8 +92121,12 @@ "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the cup with the right gripper", "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ @@ -82232,23 +92166,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 7264, + "total_episodes": 48, + "total_frames": 14373, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 7, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "300.47 MB" + "dataset_size": "542.57 MB" }, - "frame_num": 7264, - "dataset_size": "300.47 MB", - 
"data_structure": "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 14373, + "dataset_size": "542.57 MB", + "data_structure": "Airbot_MMK2_storage_tumbler_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:47" }, "features": { "observation.images.cam_head_rgb": { @@ -82602,9 +92536,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Tianqin_A2_box_storage_part": { - "path": "Tianqin_A2_box_storage_part", - "dataset_name": "box_storage_part", + "Cobot_Magic_take_out_a_pen_from_the_pen_holder": { + "path": "Cobot_Magic_take_out_a_pen_from_the_pen_holder", + "dataset_name": "take_out_a_pen_from_the_pen_holder", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -82615,7 +92549,7 @@ "pick", "place" ], - "tasks": "Place the data cable in the another box", + "tasks": "Place the pen on the table with the left gripper", "objects": [ { "object_name": "table", @@ -82626,35 +92560,43 @@ "level5": null }, { - "object_name": "part", - "level1": "tool", - "level2": "part", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", + "object_name": "pen_holder", + "level1": "office_supplies", + "level2": "pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "fabric", + "level2": "tablecloth", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-681531", - "dataset_size": "25.6GB", + "operation_platform_height": 77.2, + "frame_range": "0-225692", + "dataset_size": "6.5GB", "statistics": { - "total_episodes": 1104, - "total_frames": 
681531, - "total_tasks": 1, - "total_videos": 3312, - "total_chunks": 2, + "total_episodes": 488, + "total_frames": 225692, + "total_tasks": 5, + "total_videos": 1464, + "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "7a1de37c-d8b0-476a-b503-99a0fd93cafd", + "dataset_uuid": "836047ba-9812-4f07-b6bd-1d98e5c01c1d", "language": [ "en", "zh" @@ -82663,10 +92605,14 @@ "robotics" ], "sub_tasks": [ - "Place the data cable in the another box", + "Place the pen on the table with the left gripper", "Abnormal", - "Grasp the data cable", "End", + "Pick up the pen with the left gripper", + "Pick up the pen from the pen holder", + "Place the pen on the table with the right gripper", + "Pick up the pen with the right gripper", + "Place the pen on the table", "null" ], "annotations": { @@ -82704,24 +92650,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, 
Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Tianqin_A2_box_storage_part_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", - "structure": "Tianqin_A2_box_storage_part_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_take_out_a_pen_from_the_pen_holder_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_take_out_a_pen_from_the_pen_holder_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_food_packaging": { - "path": "Cobot_Magic_food_packaging", - "dataset_name": "food_packaging", + "R1_Lite_move_the_position_of_the_long_bread": { + "path": "R1_Lite_move_the_position_of_the_long_bread", + "dataset_name": "move_the_position_of_the_long_bread", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", - "pull", "place", - "pick" + "pick", + "grasp" ], - "tasks": "Place the banana into the package", + "tasks": "Grasp the long bread with right gripper", "objects": [ { "object_name": "table", @@ -82732,9 +92677,9 @@ "level5": null }, { - "object_name": "lunch_box", - "level1": "container", - "level2": "lunch_box", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null @@ -82748,542 +92693,513 @@ "level5": null }, { - "object_name": "bread", - "level1": "food", - "level2": "bread", + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", "level3": null, "level4": null, "level5": null }, { - "object_name": "pear", - "level1": "fruit", - "level2": "pear", + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "cucumber", - "level1": "vegetable", - "level2": "cucumber", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": 
null, "level4": null, "level5": null }, { - "object_name": "bag", + "object_name": "can", "level1": "container", - "level2": "bag", + "level2": "can", "level3": null, "level4": null, "level5": null }, { - "object_name": "lemon", - "level1": "fruit", - "level2": "lemon", + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-1470087", - "dataset_size": "25.8GB", - "statistics": { - "total_episodes": 798, - "total_frames": 1470087, - "total_tasks": 2, - "total_videos": 2394, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "d6b80e23-6bac-471f-a759-a475112b145d", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the banana into the package", - "Right hand grabs cucumber.", - "Right hand moves lunch bag.", - "Right hand grabs lunch bag.", - "Discard.", - "end", - "Right hand grabs banana.", - "Grasp the box with the right gripper", - "Grasp the package with the left gripper", - "Left hand grabs lunch box.", - "Hold the package and Pick up the bread", - "Left hand secures lunch bag.", - "Abnormal", - "Place the lemon into the package with the right gripper", - "Grasp the banana with the right gripper", - "Right hand grabs lunch box.", - "Left hand stands lunch bag upright.", - "Place the pear into the package with the right gripper", - "Place the pear into the package", - "Grasp the cucumber with the right gripper", - "Place the box into the package", - "Hold the package and Pick up the box", - "Hold the package and Pick up the cucumber", - "Right hand stands lunch bag upright.", - "Pick up the banana", - "Right hand grabs lemon.", - "Place the banana into the package with the right gripper", - "Place the bread into the package", - "Right hand places into bag.", - "Left hand moves lunch bag.", - "Grasp the lemon with the right gripper", - "Right hand 
grabs bread.", - "Pick up the pear", - "zip up the zipper to close the bag", - "Pick up the box", - "Right hand grabs pear.", - "Right hand pulls zipper.", - "Pick up the bread", - "Grasp the pear with the right gripper", - "Pick up the cucumber", - "Hold the package and Pick up the pear", - "Place the box into the package with the right gripper", - "Place the cucumber into the package with the right gripper", - "Right hand receives lunch box.", - "Hold the package and Pick up the banana", - "Left hand grabs lunch bag.", - "Left hand lifts lunch box.", - "Place the cucumber into the package", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, 
Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_food_packaging_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "agilex_cobot_magic_pass_object_right_to_left_white_tablecloth": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_white_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "ambrosial_yogurt", + "object_name": "long_bread", "level1": "food", - "level2": "ambrosial_yogurt", + "level2": "long_bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "banana", - "level1": "food", - "level2": "banana", + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", "level3": null, "level4": null, "level5": null }, { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", + "object_name": "block", + "level1": "toy", + "level2": "block", "level3": null, "level4": null, "level5": null }, { - "object_name": "milk", - "level1": "food", - "level2": "milk", + "object_name": "duck", + "level1": "toy", + "level2": "duck", "level3": null, "level4": null, "level5": null }, { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", "level3": null, "level4": null, "level5": null }, { - 
"object_name": "grape", - "level1": "food", - "level2": "grape", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", + "object_name": "basket", + "level1": "container", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", + "object_name": "cola", + "level1": "drink", + "level2": "cola", "level3": null, "level4": null, "level5": null }, { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", "level3": null, "level4": null, "level5": null }, { - "object_name": "eyeglass_case", - "level1": "laboratory_supplies", - "level2": "eyeglass_case", + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", "level3": null, "level4": null, "level5": null }, { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", + "object_name": "towel", + "level1": "clothing", + "level2": "towel", "level3": null, "level4": null, "level5": null }, { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", + "object_name": "orange", + "level1": "fruit", + "level2": "orange", "level3": null, "level4": null, "level5": null }, { - "object_name": "cleanser", - "level1": "daily_necessities", - "level2": "cleanser", + "object_name": "peach", + "level1": "fruit", + "level2": "peach", "level3": null, "level4": null, "level5": null }, { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", "level3": null, "level4": null, "level5": null }, { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": 
"whiteboard_eraser", + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", "level3": null, "level4": null, "level5": null }, { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", "level3": null, "level4": null, "level5": null }, { - "object_name": "white_table_cloths", - "level1": "laboratory_supplies", - "level2": "white_table_cloths", + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." - ], - "sub_tasks": [ - { - "subtask": "The left gripper places milk on the left side of the table", - "subtask_index": 0 - }, - { - "subtask": "\nPass the square chewing gun to the left gripper\n", - "subtask_index": 1 - }, - { - "subtask": "The left gripper places Rubik's Cube on the left side of the table", - "subtask_index": 2 - }, - { - "subtask": "\nPass the yogurt to the left gripper\n", - "subtask_index": 3 - }, - { - "subtask": "The left gripper places grey towel on the left side of the table", - "subtask_index": 4 - }, - { - "subtask": "End", - "subtask_index": 5 - }, - { - "subtask": "Use the right gripper to grab the banana on the right side of the table", - "subtask_index": 6 - }, - { - "subtask": "The left gripper places bananal on the left side of the table", - "subtask_index": 7 - }, - { - "subtask": "\nPass the shower spherer to the left gripper\n", - "subtask_index": 8 - }, - { - "subtask": "The left gripper places yogurt on the left side of the table", - "subtask_index": 9 - }, - { - "subtask": "\nPass the eggplant to the left gripper\n", - "subtask_index": 10 - }, - { - "subtask": "\nPass the eyeglass 
case to the left gripper\n", - "subtask_index": 11 - }, - { - "subtask": "Use the right gripper to grab the banana on the right side of the table", - "subtask_index": 12 - }, - { - "subtask": "The left gripper places eggplant on the left side of the table", - "subtask_index": 13 - }, - { - "subtask": "\nPass the eggplant to the left gripper\n", - "subtask_index": 14 }, { - "subtask": "Use the right gripper to grab the milk on the right side of the table", - "subtask_index": 15 + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "The left gripper places banana on the left side of the table", - "subtask_index": 16 + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "\nPass the Rubik's Cube to the left gripper\n", - "subtask_index": 17 + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the eggplant on the right side of the table", - "subtask_index": 18 + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the square chewing gun on the right side of the table", - "subtask_index": 19 + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the banana the on right side of the table", - "subtask_index": 20 + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", - "subtask_index": 21 + "object_name": "dish", + "level1": "container", + "level2": "dish", + 
"level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the bananal on the right side of the table", - "subtask_index": 22 + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "The left gripper places shower sphere on the left side of the table", - "subtask_index": 23 + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "\nPass the grey towel to the left gripper\n", - "subtask_index": 24 + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the blue blackboard erasure on the right side of the table", - "subtask_index": 25 + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "\nPass the banana to the left gripper\n", - "subtask_index": 26 + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the purple garbage bag on the right side of the table", - "subtask_index": 27 + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "\nPass the purple garbage bag to the left gripper\n", - "subtask_index": 28 + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "The left gripper places blue blackboard erasure on the left side of the table", - "subtask_index": 29 + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null }, { - 
"subtask": "Use the right gripper to grab the eggplant on the right side of the table", - "subtask_index": 30 + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Use the right gripper to grab the square chewing gum on the right side of the table", - "subtask_index": 31 - }, + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-9249", + "dataset_size": "344.7MB", + "statistics": { + "total_episodes": 31, + "total_frames": 9249, + "total_tasks": 1, + "total_videos": 124, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "bdee9310-93d0-4808-a0ad-4ca01b25ac9f", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the long bread with right gripper", + "Grasp the long bread with left gripper", + "Place the long bread on the table with right gripper", + "Place the long bread on the table with left gripper", + "Abnormal", + "Static", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": 
"@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_move_the_position_of_the_long_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_long_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_take_electronics": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ { - "subtask": "The left gripper places square chewing gun on the left side of the table", - "subtask_index": 32 + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_take_electronics", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "industry", + "level2": "factory", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "subtask": "\nPass the shower sphere to the left gripper\n", - "subtask_index": 33 + "object_name": "paper_boxes", + "level1": "baskets", + "level2": "paper_boxes", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "The left gripper places eyeglass case on the left side of the table", - "subtask_index": 34 - }, + "object_name": "lid", + "level1": "storage_utensils", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + 
"task_instruction": [ + "take the calculator box and mouse box off the lid and place them on the table." + ], + "sub_tasks": [ { - "subtask": "Use the right gripper to grab the yogurt on the right side of the table", - "subtask_index": 35 + "subtask": "Abnormal", + "subtask_index": 0 }, { - "subtask": "\nPass the blue blackboard erasure to the left gripper\n", - "subtask_index": 36 + "subtask": "Place the mouse box on the table with the left gripper", + "subtask_index": 1 }, { - "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", - "subtask_index": 37 + "subtask": "Grasp the calculator box on the white lid and with the right gripper", + "subtask_index": 2 }, { - "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", - "subtask_index": 38 + "subtask": "Grasp the mouse box on the white lid and with the left gripper", + "subtask_index": 3 }, { - "subtask": "\nPass the milk to the left gripper\n", - "subtask_index": 39 + "subtask": "End", + "subtask_index": 4 }, { - "subtask": "The left gripper places purple garbage bag on the left side of the table", - "subtask_index": 40 + "subtask": "Place the calculator box on the table with the right gripper", + "subtask_index": 5 }, { "subtask": "null", - "subtask_index": 41 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "handover", - "takeover" + "pick", + "place" ], "robot_name": [ - "agilex_cobot_magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -83294,23 +93210,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 99, - "total_frames": 52998, + "total_episodes": 49, + "total_frames": 13465, "fps": 30, - "total_tasks": 42, - "total_videos": 297, + "total_tasks": 7, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "508.79 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "400.20 MB" }, - "frame_num": 52998, - "dataset_size": "508.79 MB", - "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_white_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- 
episode_000011.parquet\n| `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 13465, + "dataset_size": "400.20 MB", + "data_structure": "Airbot_MMK2_take_electronics_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:98" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -83382,10 +93298,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -83394,32 +93333,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -83428,26 +93377,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -83487,17 +93446,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -83514,10 +93473,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + 
"dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -83534,130 +93493,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -83679,11 +93578,11 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Split_aloha_zip_up_the_document_bag": { - "path": "Split_aloha_zip_up_the_document_bag", - "dataset_name": "zip_up_the_document_bag", + "Cobot_Magic_take_out_the_bread": { + "path": "Cobot_Magic_take_out_the_bread", + "dataset_name": "take_out_the_bread", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -83691,10 +93590,131 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pull", + "place", + "pick" + ], + "tasks": "use the right arm to put a slice of bread into the plate", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread_machine", + "level1": "kitchenware", + "level2": "bread_machine", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-333147", + "dataset_size": "15.1GB", + "statistics": { + "total_episodes": 588, + "total_frames": 333147, + "total_tasks": 6, + "total_videos": 1764, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "2fb93e01-1b87-444e-8fef-493ab51a3c3f", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "use the right arm to put 
a slice of bread into the plate", + "use the left arm to put a slice of bread into the empty plate", + "use the left arm to take out a slice of bread", + "use the right arm to take out a slice of bread", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi 
Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_take_out_the_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_take_out_the_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── 
meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_mobile_cube": { + "path": "Cobot_Magic_mobile_cube", + "dataset_name": "mobile_cube", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", "place" ], - "tasks": "Pulling up the zipper on the transparent plastic bag with the left gripper to close it", + "tasks": "End", "objects": [ { "object_name": "table", @@ -83705,27 +93725,27 @@ "level5": null }, { - "object_name": "document_bag", - "level1": "office_supplies", - "level2": "document_bag", + "object_name": "cube_block", + "level1": "toy", + "level2": "cube_block", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-234815", - "dataset_size": "2.7GB", + "frame_range": "0-21487", + "dataset_size": "347.9MB", "statistics": { - "total_episodes": 495, - "total_frames": 234815, - "total_tasks": 4, - "total_videos": 1485, + "total_episodes": 99, + "total_frames": 21487, + "total_tasks": 1, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "e06c5784-df23-4a4a-b86d-adc2bcf12bae", + "dataset_uuid": "ec621804-bf42-4f9d-971a-bbe5cb197144", "language": [ "en", "zh" @@ -83734,13 +93754,563 @@ "robotics" ], "sub_tasks": [ - "Pulling up the zipper on the 
transparent plastic bag with the left gripper to close it", - "Pulling up the zipper on the transparent plastic bag with the right gripper to close it", + "End", + "Pick up the small cube", + "Place the small cube on the ahead of the table", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, 
Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_mobile_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_mobile_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_stack_bowls": { + "path": "G1edu-u3_stack_bowls", + "dataset_name": "stack_bowls", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the bowl on the right onto the bowl in the middle with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-166122", + "dataset_size": "7.0GB", + "statistics": { + "total_episodes": 261, + "total_frames": 166122, + "total_tasks": 2, + "total_videos": 783, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "d27ed137-4947-4948-b818-a02094cab254", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the bowl on the right onto the bowl in the middle with right gripper", + "Grasp the bowl on the right with the right gripper", + "Grasp the bowl on the left with the left gripper", + "End", "Static", + "Abnormal", + "Place 
the bowl on the left onto the bowl in the middle with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, 
Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_stack_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_stack_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n 
│ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_fold_shorts": { + "path": "RMC-AIDA-L_fold_shorts", + "dataset_name": "fold_shorts", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "fold", + "place" + ], + "tasks": "Grab the lower left pant leg with your left hand.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "shorts", + "level1": "clothing", + "level2": "shorts", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-730046", + "dataset_size": "9.5GB", + "statistics": { + "total_episodes": 866, + "total_frames": 730046, + "total_tasks": 4, + "total_videos": 2598, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "5f973d3e-d7a6-40c1-8ecc-0a4a158505f7", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grab the lower left pant leg with your left hand.", + "Grab the lower right pant leg with your right hand.", + "Fold the bottom of the shorts upward with right gripper", + "Fold the pants from right to left with the right gripper", + "Hold the waistband with your left hand.", + "Hold the lower right waistband with your right hand.", + "Grab the waistband with your right hand.", + "Fold to the right with your left hand.", + "Press the 
middle of the pants with your left hand.", + "Fold upward with your left hand.", + "Fold to the left with your right hand.", + "Hold the lower left waistband with your left hand.", + "Place the folded trousers onto the center area with the right grippers", + "Place the folded trousers onto the center area with right gripper", + "Place the folded trousers onto the center area with the left grippers", + "Grasp the right lower side of the waistband of the shorts with right gripper", + "end", + "Anomaly detected.", + "Grasp the lower left leg of the shorts with left gripper", + "Fold the shorts from right to left with right gripper", + "abnormal", + "Fold the pants from left to right with the left gripper", + "Fold the bottom of the shorts upward with left gripper", + "Fold the bottom of the pants upward with both grippers", + "Adjust the pants with your right hand.", + "Fold upward with your right hand.", + "Adjust the pants with your left hand.", + "Press the middle of the pants with your right hand.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan 
Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_fold_shorts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_fold_shorts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_place_bottle_c": { + "path": "G1edu-u3_place_bottle_c", + "dataset_name": "place_bottle_c", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bottle", + "level1": "container", + "level2": "bottle", + "level3": null, + 
"level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-7277", + "dataset_size": "92.3MB", + "statistics": { + "total_episodes": 29, + "total_frames": 7277, + "total_tasks": 1, + "total_videos": 29, + "total_chunks": 1, + "chunks_size": 30, + "fps": 30 + }, + "dataset_uuid": "5e6d1031-7c1e-4cd5-a371-1682b697fabd", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ "End", - "Pick up the transparent plastic bag with the left gripper", - "Pick up the transparent plastic bag with the right gripper", - "Place the transparent plastic bag", + "Place the water bottle on the table with left gripper", + "Place the water bottle on the table with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, 
Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_place_bottle_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_place_bottle_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ 
├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_hang_clothes": { + "path": "R1_Lite_hang_clothes", + "dataset_name": "hang_clothes", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Pick up the clothes rack on the clothesline with the right gripper", + "objects": [ + { + "object_name": "clothesline", + "level1": "tool", + "level2": "clothesline", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "clothes_hanger", + "level1": "daily_necessities", + "level2": "clothes_hanger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "clothes", + "level1": "fabric", + "level2": "clothes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-289827", + "dataset_size": "22.8GB", + "statistics": { + "total_episodes": 109, + "total_frames": 289827, + "total_tasks": 1, + "total_videos": 327, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "f7bbf41d-af0d-4f31-9158-8e7987fb7c9f", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick up the clothes rack on the clothesline with the right gripper", + "Hang the clothes rack on the clothesline with the right gripper", + "Take the clothes off the clothes rack with the right gripper", + "End", + "Hang 
the clothes on the clothes rack", + "Put clothes in the basket with the left gripper", + "Pick up the clothes rack on the clothesline", + "Hang the clothes rack on the clothesline", + "Pick up clothes from the basket", + "Take the clothes off the clothes rack", + "Hang the clothes on the clothes rack with both grippers", + "Pick up clothes from the basket with the left gripper", + "abnormal", + "Put clothes in the basket", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei 
Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_hang_clothes_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_hang_clothes_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── 
(...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_hotel_services_i": { + "path": "leju_robot_hotel_services_i", + "dataset_name": "hotel_services_i", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "card", + "level1": "nfc", + "level2": "card", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-33547", + "dataset_size": "2.1GB", + "statistics": { + "total_episodes": 189, + "total_frames": 33547, + "total_tasks": 1, + "total_videos": 567, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, 
+ "dataset_uuid": "fc45ac73-e09b-491e-9dc0-71c8c532f0a7", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Take out the room card with right gripper", + "Hand the room card to the guest with right gripper", + "Hand the room card to the target.", + "Pick up the room card from the card holder.", "null" ], "annotations": { @@ -83778,12 +94348,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Split_aloha_zip_up_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ 
└── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_zip_up_the_document_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_hotel_services_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_store_wet_wipes_and_bowls": { - "path": "AIRBOT_MMK2_store_wet_wipes_and_bowls", - "dataset_name": "store_wet_wipes_and_bowls", + "AIRBOT_MMK2_place_the_yellow_block": { + "path": "AIRBOT_MMK2_place_the_yellow_block", + "dataset_name": "place_the_yellow_block", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -83791,10 +94361,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "open", - "hold" + "pick", + "lower" ], - "tasks": "Place the bowl on the plate with the right gripper", + "tasks": "Static", "objects": [ { "object_name": "table", @@ -83805,43 +94375,35 @@ "level5": null }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "wet_tissue", - "level1": "daily_necessities", - "level2": "wet_tissue", + "object_name": "square_block", + "level1": "toy", + "level2": "square_block", "level3": null, "level4": null, "level5": null }, { - "object_name": "square_plate", - "level1": "container", - "level2": "square_plate", + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-13058", - "dataset_size": "425.0MB", + "frame_range": "0-10030", + "dataset_size": "501.4MB", "statistics": { - "total_episodes": 50, - "total_frames": 13058, + "total_episodes": 47, + "total_frames": 10030, "total_tasks": 1, - "total_videos": 200, + "total_videos": 188, "total_chunks": 1, 
"chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "de8bf908-6d6b-42fb-9298-29fa21a3fc8d", + "dataset_uuid": "5627f996-6a84-4ee5-942f-6a546d68645f", "language": [ "en", "zh" @@ -83850,12 +94412,13 @@ "robotics" ], "sub_tasks": [ - "Place the bowl on the plate with the right gripper", "Static", - "Grasp the wet wipes with the left gripper", - "Grasp the bowl with the right gripper", "End", - "Place the wet wipes into the bowl with the left gripper", + "Abnormal", + "Grasp the yellow block with left gripper", + "Grasp the yellow block on the magic cube with right gripper", + "Place the yellow block on the magic cube with left gripper", + "Place the yellow block on the table with right gripper", "null" ], "annotations": { @@ -83893,10 +94456,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_store_wet_wipes_and_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_store_wet_wipes_and_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── 
info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_place_the_yellow_block_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_yellow_block_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_part_both_hands": { + "Agilex_Cobot_Magic_classify_objects_six": { "task_categories": [ "robotics" ], @@ -83926,11 +94489,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_part_both_hands", + "dataset_name": "Agilex_Cobot_Magic_classify_objects_six", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", 
"level3": null, "level4": null, "level5": null @@ -83938,25 +94501,73 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "item", - "level1": "item", - "level2": "item", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "square_building_blocks", - "level1": "building_blocks", - "level2": "square_building_blocks", + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "black_basket", + "level1": "food", + "level2": "black_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_clear_plastic_cup", + "level1": "kitchen_supplies", + "level2": "pink_clear_plastic_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "laundry_detergent", + "level1": "daily_necessities", + "level2": "laundry_detergent", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "Mmentholatum_facial_cleanser", "level3": null, "level4": null, "level5": null @@ -83964,61 +94575,147 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the items off the building blocks with both 
hands and place them on the table." + "place multiple objects separately in different baskets." ], "sub_tasks": [ { - "subtask": "Grasp the steel tube on the cube block with the right gripper", + "subtask": "Place the orange in the light basket with left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the steel tube on the cube block with the left gripper", + "subtask": "Grasp the xx with the right gripper", "subtask_index": 1 }, { - "subtask": "Place the steel tube on the table with the left gripper", + "subtask": "Pick up the facial cleanser with left gripper", "subtask_index": 2 }, { - "subtask": "Place the steel tube on the table with the right gripper", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the facial cleanser in the dark basket with left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the XX into the basket on the left with the right gripper", "subtask_index": 5 + }, + { + "subtask": "Place the lime in the light basket with right gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the laundry detergent in the dark basket with right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the orange in the light basket with right gripper", + "subtask_index": 8 + }, + { + "subtask": "Pick up the lime with left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the XX into the basket on the right with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the XX into the basket on the left with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the XX into the basket on the right with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the xx with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Pick up the orange with left gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the bread in the light basket with right gripper", + "subtask_index": 15 + }, + { + "subtask": "Pick up the laundry 
detergent with right gripper", + "subtask_index": 16 + }, + { + "subtask": "Abnormal", + "subtask_index": 17 + }, + { + "subtask": "Pick up the laundry detergent with left gripper", + "subtask_index": 18 + }, + { + "subtask": "Pick up the facial cleanser with right gripper", + "subtask_index": 19 + }, + { + "subtask": "Pick up the lime with right gripper", + "subtask_index": 20 + }, + { + "subtask": "Pick up the bread with right gripper", + "subtask_index": 21 + }, + { + "subtask": "Place the brown cup in the dark basket with left gripper", + "subtask_index": 22 + }, + { + "subtask": "Place the laundry detergent in the dark basket with left gripper", + "subtask_index": 23 + }, + { + "subtask": "Pick up the orange with right gripper", + "subtask_index": 24 + }, + { + "subtask": "Place the lime in the light basket with left gripper", + "subtask_index": 25 + }, + { + "subtask": "Pick up the brown cup with left gripper", + "subtask_index": 26 + }, + { + "subtask": "null", + "subtask_index": 27 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, 
"depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -84029,23 +94726,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 6348, + "total_episodes": 199, + "total_frames": 302506, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 28, + "total_videos": 597, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "239.11 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "3.88 GB" }, - "frame_num": 6348, - "dataset_size": "239.11 MB", - "data_structure": "Airbot_MMK2_take_part_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 302506, + "dataset_size": "3.88 GB", + "data_structure": "Agilex_Cobot_Magic_classify_objects_six_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(187 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:198" }, "features": { "observation.images.cam_head_rgb": { @@ -84117,33 +94814,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -84152,42 +94826,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -84196,36 +94860,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -84265,17 +94919,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -84292,10 +94946,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -84312,70 +94966,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -84397,14 +95111,14 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Galbot_g1_steamer_storage_baozi_g": { - "path": "Galbot_g1_steamer_storage_baozi_g", - "dataset_name": "steamer_storage_baozi_g", + "leju_robot_box_storage_parcel_b": { + "path": "leju_robot_box_storage_parcel_b", + "dataset_name": "box_storage_parcel_b", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ @@ -84412,7 +95126,7 @@ "pick", "place" ], - "tasks": "Grasp the pot lid with left gripper", + "tasks": "Place the package into the parcel locker.", "objects": [ { "object_name": "table", @@ -84423,254 +95137,43 @@ "level5": null }, { - "object_name": "baozi", - "level1": "food", - "level2": "baozi", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "steamer", - "level1": "cookware", - "level2": "steamer", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pot_lid", - "level1": "daily_necessities", - "level2": "pot_lid", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-87860", - "dataset_size": "1.6GB", - "statistics": { - "total_episodes": 89, - "total_frames": 87860, - "total_tasks": 1, - "total_videos": 267, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "fc0c05a2-5b0d-4aa4-9329-eb16047d2f03", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the pot lid with left gripper", - "Place the pot lid on the steamer with left 
gripper", - "End", - "Place the baozi on the steamer with right gripper", - "Grasp the baozi in the plate with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, 
Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Galbot_g1_steamer_storage_baozi_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_steamer_storage_baozi_g_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ 
└── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_open_and_close_curtains": { - "path": "R1_Lite_open_and_close_curtains", - "dataset_name": "open_and_close_curtains", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pull the outeouter curtains open on both sides with the both gripper", - "objects": [ - { - "object_name": "curtains", - "level1": "clothing", - "level2": "curtains", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-226694", - "dataset_size": "8.5GB", - "statistics": { - "total_episodes": 98, - "total_frames": 226694, - "total_tasks": 1, - "total_videos": 294, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "184bad58-5213-4be2-bb93-7c6d271bf034", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pull the outeouter curtains open on both sides with the both gripper", - "Pull the outer curtains open on both sides", - "End", - "Pull the inner curtains open on both sides", - "Close the outer curtains on both sides", - "Pull the inner curtains open on left sides with the left gripper", - "Close the right inner curtain with the right gripper", - "Pull the inner curtains open on right sides with the right gripper", - "Close the left 
inner curtain with the left gripper", - "Close the outeouter curtains on both sides with the both gripper", - "Close the inner curtains on both sides", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan 
Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_open_and_close_curtains_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_open_and_close_curtains_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_moving_parts_c": { - "path": "leju_robot_moving_parts_c", - "dataset_name": "moving_parts_c", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the white part on the table with left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", + "object_name": "parcel", + "level1": "container", + "level2": "parcel", "level3": null, "level4": null, "level5": null }, { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-716756", - "dataset_size": "44.0GB", + "frame_range": "0-186311", + "dataset_size": "9.5GB", "statistics": { - "total_episodes": 490, - "total_frames": 716756, + "total_episodes": 497, + "total_frames": 186311, "total_tasks": 1, - "total_videos": 1470, + "total_videos": 1491, "total_chunks": 1, "chunks_size": 
1000, "fps": 30 }, - "dataset_uuid": "7aa945d0-afcc-41a9-bc85-6d19914ec42d", + "dataset_uuid": "5e55ad50-424b-4a78-9d00-e0d7a45f48f7", "language": [ "en", "zh" @@ -84679,18 +95182,10 @@ "robotics" ], "sub_tasks": [ - "Place the white part on the table with left gripper", - "Grasp the white part with left gripper", - "Place the gray part on the table with right gripper", - "Grasp the gray part with right gripper", - "End", - "Return to the initial position at the workbench.", - "Move the small component to the workbench.", - "Place the white part on the table with right gripper", - "Insert the small component into the corresponding slot on the workbench.", - "Pick up the small component from the shelf.", - "Move to the table behind body", - "Grasp the white part with right gripper", + "Place the package into the parcel locker.", + "Pick up the package from the inbound machine.", + "Pick up the package from the conveyor belt.", + "Place the package onto the inbound machine.", "null" ], "annotations": { @@ -84728,10 +95223,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, 
Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ 
└── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_box_storage_parcel_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_b_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_close_drawer_top": { + "Airbot_MMK2_swap_sponge_paper_box_plate": { "task_categories": [ "robotics" ], @@ -84761,11 +95256,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_close_drawer_top", + "dataset_name": "Airbot_MMK2_swap_sponge_paper_box_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -84773,17 +95268,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", + "object_name": "paper_boxes", "level1": "home_storage", - "level2": "table", + "level2": "paper_boxes", "level3": null, "level4": null, "level5": null }, { - "object_name": 
"three_layer_transparent_drawer", - "level1": "laboratory_supplies", - "level2": "three_layer_transparent_drawer", + "object_name": "cleaning_sponge", + "level1": "daily_necessities", + "level2": "cleaning_sponge", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -84791,50 +95294,69 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "close the top drawer." + "take the sponge out of the plate with left hand and put the box in with right hand." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Place the sponge on the table with the left gripper", "subtask_index": 0 }, { - "subtask": "Push the top drawer closed", + "subtask": "Grasp the mouse box with the right gripper", "subtask_index": 1 }, { - "subtask": "Use the right gripper to touch the topmost layer of the storage cabinet", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Static", "subtask_index": 3 + }, + { + "subtask": "Place the mouse box into the plate with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Grasp the sponge in the plate with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ "grasp", - "push" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, 
pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -84846,20 +95368,20 @@ ], "statistics": { "total_episodes": 50, - "total_frames": 9800, + "total_frames": 8731, "fps": 30, - "total_tasks": 4, - "total_videos": 150, + "total_tasks": 8, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "96.54 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "272.24 MB" }, - "frame_num": 9800, - "dataset_size": "96.54 MB", - "data_structure": "Agilex_Cobot_Magic_close_drawer_top_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- 
episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 8731, + "dataset_size": "272.24 MB", + "data_structure": "Airbot_MMK2_swap_sponge_paper_box_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { "train": "0:49" }, @@ -84933,10 +95455,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -84945,32 +95490,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + 
"right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -84979,26 +95534,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -85038,17 +95603,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -85065,10 +95630,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -85085,130 +95650,70 @@ 
"right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" ], - "dtype": "int32", - "shape": [ - 2 - ] + "dtype": "float32" }, - "eef_direction_action": { + "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", "shape": [ 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" ], - "dtype": "int32", - "shape": [ - 2 - ] + "dtype": "int32" }, - "gripper_mode_action": { + "eef_direction_action": { "names": [ - "left_gripper_mode", - "right_gripper_mode" + "left_eef_direction", + "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_activity_state": { + "eef_velocity_state": { "names": [ - "left_gripper_activity", - "right_gripper_activity" + "left_eef_velocity", + "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_activity_action": { + "eef_velocity_action": { "names": [ - "left_gripper_activity", - "right_gripper_activity" + "left_eef_velocity", + "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_open_scale_state": { + "eef_acc_mag_state": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_eef_acc_mag", + "right_eef_acc_mag" ], - "dtype": "float32", "shape": [ 2 - ] + 
], + "dtype": "int32" }, - "gripper_open_scale_action": { + "eef_acc_mag_action": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_eef_acc_mag", + "right_eef_acc_mag" ], - "dtype": "float32", "shape": [ 2 - ] + ], + "dtype": "int32" } }, "authors": { @@ -85230,11 +95735,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_mobile_phone_storage": { - "path": "AIRBOT_MMK2_mobile_phone_storage", - "dataset_name": "mobile_phone_storage", + "AIRBOT_MMK2_storage_spoon": { + "path": "AIRBOT_MMK2_storage_spoon", + "dataset_name": "storage_spoon", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -85242,10 +95747,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "End", + "tasks": "Static", "objects": [ { "object_name": "table", @@ -85256,35 +95761,35 @@ "level5": null }, { - "object_name": "mobile_phone", - "level1": "communication_supplies", - "level2": "mobile_phone", + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null }, { - "object_name": "calculator_box", + "object_name": "basin", "level1": "container", - "level2": "calculator_box", + "level2": "basin", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-9492", - "dataset_size": "496.3MB", + "frame_range": "0-12204", + "dataset_size": "363.2MB", "statistics": { "total_episodes": 49, - "total_frames": 9492, + "total_frames": 12204, "total_tasks": 1, "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "ae74a19b-7e4c-4f10-b378-faa014a9f006", + 
"dataset_uuid": "d4290af8-7a4d-4868-8a9b-2962225648e8", "language": [ "en", "zh" @@ -85293,13 +95798,12 @@ "robotics" ], "sub_tasks": [ - "End", - "Place the telephone on the table with the right gripper", "Static", - "Place the telephone on the box with the left gripper", - "Grasp the telephone with the left gripper", - "Grasp the telephone with the right gripper", - "Abnormal", + "Grasp the spoon with the right gripper", + "End", + "Grasp the spoon with the left gripper", + "Place the spoon into the basin with the right gripper", + "Place the spoon into the basin with the left gripper", "null" ], "annotations": { @@ -85337,10 +95841,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": 
"AIRBOT_MMK2_mobile_phone_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_mobile_phone_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_storage_spoon_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_storage_spoon_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_bowl_sponge": { + "Agilex_Cobot_Magic_fold_short_sleeve_black": { "task_categories": [ "robotics" ], @@ -85370,11 +95874,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_bowl_sponge", + "dataset_name": "Agilex_Cobot_Magic_fold_short_sleeve_black", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "bedroom", "level3": null, "level4": null, "level5": null @@ -85382,25 +95886,25 @@ "env_type": "Due to some reasons, this dataset 
temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cleaning_sponge", - "level1": "cleaning", - "level2": "cleaning_sponge", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", + "object_name": "black_T-shirt", + "level1": "clothing", + "level2": "black_T-shirt", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", "level3": null, "level4": null, "level5": null @@ -85408,65 +95912,84 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the sponge and bowl by hand and place them on the table." + "use two grippers to fold the black short sleeve, and use the left claw to place the folded black short sleeve on the tray." 
], "sub_tasks": [ { - "subtask": "Place the sponge on the table with the left gripper", + "subtask": "Lift the black T-shirt with the left gripper", "subtask_index": 0 }, { - "subtask": "Abnormal", + "subtask": "Lift the black T-shirt with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the sponge on the white basket and with the left gripper", + "subtask": "Grasp the black T-shirt with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the bowl on the white basket and with the right gripper", + "subtask": "Fold the black T-shirt downward with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Grasp the black T-shirt with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the bowl on the table with the right gripper", + "subtask": "Fold the black T-shirt downward with the left gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Fold the black T-shirt from right to left with right gripper", "subtask_index": 6 + }, + { + "subtask": "abnormal", + "subtask_index": 7 + }, + { + "subtask": "end", + "subtask_index": 8 + }, + { + "subtask": "Fold the black T-shirt from left to right with left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the folded black T-shirt on the green tray with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "null", + "subtask_index": 11 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower", + "fold" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": 
"dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -85477,23 +96000,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 42, - "total_frames": 11308, + "total_episodes": 50, + "total_frames": 76223, "fps": 30, - "total_tasks": 7, - "total_videos": 168, + "total_tasks": 12, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "373.70 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "999.69 MB" }, - "frame_num": 11308, - "dataset_size": "373.70 MB", - "data_structure": "Airbot_MMK2_take_bowl_sponge_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| 
`-- ... (30 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 76223, + "dataset_size": "999.69 MB", + "data_structure": "Agilex_Cobot_Magic_fold_short_sleeve_black_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:41" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -85565,33 +96088,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -85600,42 +96100,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -85644,36 +96134,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -85713,17 +96193,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -85740,10 +96220,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -85760,70 +96240,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -85845,9 +96385,9 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_rotate_cube_face": { + "Airbot_MMK2_take_cup": { "task_categories": [ "robotics" ], @@ -85877,11 +96417,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_rotate_cube_face", + "dataset_name": "Airbot_MMK2_take_cup", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "study_room", + "level1": "catering", + "level2": "cafe", "level3": null, "level4": null, "level5": null @@ -85889,9 +96429,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", + "object_name": "cup", + "level1": "cups", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "storage_utensils", + "level2": "lid", "level3": null, "level4": null, "level5": null @@ -85899,47 +96447,39 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the rubik's cube with left hand, rotate it once with right hand, and then put it down with left hand." + "take the cup off the white lid and place them on the table by hands." 
], "sub_tasks": [ { - "subtask": "Adjust the rubik's cube with the left gripper", + "subtask": "place the cup in the table use the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the rubik's cube with the left gripper", + "subtask": "place the cup in the table use the right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Grasp the cup the left gripper", "subtask_index": 2 }, { - "subtask": "Place the rubik's cube on the table with the left gripper", + "subtask": "Grasp the cup the right gripper", "subtask_index": 3 }, { - "subtask": "Move the rightmost combination block of the rubik's cube with the right gripper", + "subtask": "End", "subtask_index": 4 }, - { - "subtask": "Abnormal", - "subtask_index": 5 - }, - { - "subtask": "Lift up the rubik's cube with the left gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 5 } ], "atomic_actions": [ - "grasp", - "pick", + "pinch", "place", - "filp" + "clip", + "takeout" ], "robot_name": [ "Airbot_MMK2" @@ -85973,23 +96513,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 145, - "total_frames": 55057, + "total_episodes": 35, + "total_frames": 5435, "fps": 30, - "total_tasks": 8, - "total_videos": 580, + "total_tasks": 6, + "total_videos": 140, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "2.13 GB" + "dataset_size": "169.73 MB" }, - "frame_num": 55057, - "dataset_size": "2.13 GB", - "data_structure": "Airbot_MMK2_rotate_cube_face_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- 
episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (133 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 5435, + "dataset_size": "169.73 MB", + "data_structure": "Airbot_MMK2_take_cup_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(23 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:144" + "train": "0:34" }, "features": { "observation.images.cam_head_rgb": { @@ -86343,7 +96883,104 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_tumbler_umbrella": { + "Cobot_Magic_mobile_cube_blackboard": { + "path": "Cobot_Magic_mobile_cube_blackboard", + "dataset_name": "mobile_cube_blackboard", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the block at the correct position to assemble the letter S", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cube_block", + "level1": "toy", + "level2": "cube_block", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-191398", + "dataset_size": "3.4GB", + "statistics": { + "total_episodes": 100, + "total_frames": 191398, + "total_tasks": 1, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "679f07f9-efb6-4a77-9347-2423da9981cd", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the block at the correct position to assemble the letter S", + "out of view", + "End", + "Pick up the block on the table", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": 
"auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_mobile_cube_blackboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_mobile_cube_blackboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_storage_mango_pomegranate": { "task_categories": [ "robotics" ], @@ -86373,11 +97010,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_tumbler_umbrella", + "dataset_name": "Airbot_MMK2_storage_mango_pomegranate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -86385,33 +97022,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", + "object_name": "pomegranate", + "level1": "fruit", + "level2": "pomegranate", "level3": null, "level4": null, "level5": null }, { - "object_name": "umbrella", - "level1": "tool", - "level2": "umbrella", + "object_name": "mango", + "level1": "fruit", + "level2": "mango", "level3": null, "level4": null, "level5": null }, { - "object_name": "tumbler", - "level1": "container", - "level2": "tumbler", + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -86419,19 +97048,19 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the umbrella with left 
hand and put it in the basket, then use right hand to pick up the thermos and put it in the basket." + "pick up the pomegranate with left hand and put it in the storage box, and pick up the mango with right hand and put it in the storage box." ], "sub_tasks": [ { - "subtask": "Place the cup into the white basket with the right gripper", + "subtask": "Place the pomegranate into the left compartment of the storage box with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the umbrella with the left gripper", + "subtask": "Grasp the pomegranate with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the umbrella into the white basket with the left gripper", + "subtask": "Grasp a mango with the right gripper", "subtask_index": 2 }, { @@ -86439,16 +97068,28 @@ "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the pomegranate into the left compartment of the storage box with the left gripper", "subtask_index": 4 }, { - "subtask": "Grasp the cup with the right gripper", + "subtask": "Grasp a pomegranate with the left gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Grasp the mango with the right gripper", "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "Place the mango into the right compartment of the storage box with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "null", + "subtask_index": 9 } ], "atomic_actions": [ @@ -86488,23 +97129,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 14373, + "total_episodes": 90, + "total_frames": 28506, "fps": 30, - "total_tasks": 7, - "total_videos": 192, + "total_tasks": 10, + "total_videos": 360, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "542.57 MB" + "dataset_size": "895.99 MB" }, - "frame_num": 14373, - "dataset_size": "542.57 MB", - "data_structure": 
"Airbot_MMK2_storage_tumbler_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 28506, + "dataset_size": "895.99 MB", + "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_mango_pomegranate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(78 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:47" + "train": "0:89" }, "features": { "observation.images.cam_head_rgb": { @@ -86858,9 +97499,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_take_out_a_pen_from_the_pen_holder": { - "path": "Cobot_Magic_take_out_a_pen_from_the_pen_holder", - "dataset_name": "take_out_a_pen_from_the_pen_holder", + "AgiBot-g1_box_storage_c": { + "path": "AgiBot-g1_box_storage_c", + "dataset_name": "box_storage_c", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -86868,10 +97509,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "place", + "pick" ], - "tasks": "Place the pen on the table with the left gripper", + "tasks": "Pick up the mouse and power cord box.", "objects": [ { "object_name": "table", @@ -86882,43 +97523,27 @@ "level5": null }, { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen_holder", - "level1": "office_supplies", - "level2": "pen_holder", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tablecloth", - "level1": "fabric", - "level2": "tablecloth", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-225692", - "dataset_size": "6.5GB", + "operation_platform_height": null, + "frame_range": "0-23361", + "dataset_size": "10.2GB", "statistics": { - "total_episodes": 488, - "total_frames": 
225692, - "total_tasks": 5, - "total_videos": 1464, + "total_episodes": 39, + "total_frames": 23361, + "total_tasks": 1, + "total_videos": 312, "total_chunks": 1, "chunks_size": 1000, - "fps": 50 + "fps": 30 }, - "dataset_uuid": "836047ba-9812-4f07-b6bd-1d98e5c01c1d", + "dataset_uuid": "a55ac5d2-061f-4ff3-9670-41aef4e03037", "language": [ "en", "zh" @@ -86927,14 +97552,12 @@ "robotics" ], "sub_tasks": [ - "Place the pen on the table with the left gripper", - "Abnormal", + "Pick up the mouse and power cord box.", + "Place the mouse and power cord box into the container.", "End", - "Pick up the pen with the left gripper", - "Pick up the pen from the pen holder", - "Place the pen on the table with the right gripper", - "Pick up the pen with the right gripper", - "Place the pen on the table", + "Place the paper box in the another big box", + "Abnormal", + "Grasp the paper box", "null" ], "annotations": { @@ -86972,382 +97595,63 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, 
He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_take_out_a_pen_from_the_pen_holder_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_take_out_a_pen_from_the_pen_holder_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_box_storage_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ 
├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_move_the_position_of_the_long_bread": { - "path": "R1_Lite_move_the_position_of_the_long_bread", - "dataset_name": "move_the_position_of_the_long_bread", + "R1_Lite_open_and_close_nightstand_drawer": { + "path": "R1_Lite_open_and_close_nightstand_drawer", + "dataset_name": "open_and_close_nightstand_drawer", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "place", + "grasp", "pick", - "grasp" - ], - "tasks": "Grasp the long bread with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": 
"daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": 
null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", 
- "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, + "place", + "push" + ], + "tasks": "Open the nightstand drawer", + "objects": [ { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", + "object_name": "nightstand", + "level1": "furniture", + "level2": "nightstand", "level3": null, "level4": null, "level5": null }, { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", + "object_name": "remote_control", + "level1": "electric_appliance", + "level2": "remote_control", "level3": null, "level4": null, "level5": null }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "tissue", + "level1": "daily_necessities", + "level2": "tissue", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-9249", - "dataset_size": "344.7MB", + "operation_platform_height": null, + "frame_range": "0-104258", + "dataset_size": "7.1GB", "statistics": 
{ - "total_episodes": 31, - "total_frames": 9249, + "total_episodes": 77, + "total_frames": 104258, "total_tasks": 1, - "total_videos": 124, + "total_videos": 231, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "bdee9310-93d0-4808-a0ad-4ca01b25ac9f", + "dataset_uuid": "f53ef972-f453-4892-bf9c-cd8d3c22762e", "language": [ "en", "zh" @@ -87356,13 +97660,12 @@ "robotics" ], "sub_tasks": [ - "Grasp the long bread with right gripper", - "Grasp the long bread with left gripper", - "Place the long bread on the table with right gripper", - "Place the long bread on the table with left gripper", - "Abnormal", - "Static", - "End", + "Open the nightstand drawer", + "Put the remote controls on the table in the drawer one by one", + "Take the remote control out of the drawer one by one and place them on the table", + "Take the tissue out of the drawer one by one and place them on the table", + "Close the nightstand drawer", + "Put the tissue on the table in the drawer one by one", "null" ], "annotations": { @@ -87400,10 +97703,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian 
Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_long_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_long_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_open_and_close_nightstand_drawer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_open_and_close_nightstand_drawer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_electronics": { + "Galaxea_R1_Lite_mix_color_small_test_tube": { "task_categories": [ "robotics" ], @@ -87433,11 +97736,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_electronics", + "dataset_name": "Galaxea_R1_Lite_mix_color_small_test_tube", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "industry", - "level2": "factory", + "level1": "office_workspace", + 
"level2": "office", "level3": null, "level4": null, "level5": null @@ -87445,17 +97748,41 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "paper_boxes", - "level1": "baskets", - "level2": "paper_boxes", + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "storage_utensils", - "level2": "lid", + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_test_tubes", + "level1": "laboratory_supplies", + "level2": "small_test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null @@ -87463,59 +97790,96 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the calculator box and mouse box off the lid and place them on the table." + "pick up the test tube with red pigment and the test tube with blue pigment by grippers and pour them into the container." 
], "sub_tasks": [ { - "subtask": "Abnormal", + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the mouse box on the table with the left gripper", + "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the calculator box on the white lid and with the right gripper", + "subtask": "Pour the yellow reagent into the graduated cylinder with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the mouse box on the white lid and with the left gripper", + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Grasp the yellow reagent with the left gripper", "subtask_index": 4 }, { - "subtask": "Place the calculator box on the table with the right gripper", + "subtask": "Grasp the blue reagent with the left gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 6 + }, + { + "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the test tube into the brown cup with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the yellow reagent with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the blue reagent with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the test tube into the brown cup with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the red reagent with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the red reagent with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "null", + "subtask_index": 15 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", 
+ "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -87532,30 +97896,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 13465, + "total_episodes": 203, + "total_frames": 147521, "fps": 30, - "total_tasks": 7, - "total_videos": 196, + "total_tasks": 16, + "total_videos": 812, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "400.20 MB" + "dataset_size": "6.71 GB" }, - "frame_num": 13465, - "dataset_size": "400.20 MB", - "data_structure": "Airbot_MMK2_take_electronics_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 147521, + "dataset_size": "6.71 GB", + "data_structure": "Galaxea_R1_Lite_mix_color_small_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(191 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:48" + "train": "0:202" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -87564,8 +97928,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -87574,11 +97938,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -87587,8 +97951,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -87597,11 +97961,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -87610,8 +97974,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -87620,11 +97984,11 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -87633,8 +97997,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + 
"video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -87646,7 +98010,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -87661,36 +98025,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -87705,30 +98047,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -87879,6 +98199,66 @@ 2 ], 
"dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -87900,135 +98280,14 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "Cobot_Magic_take_out_the_bread": { - "path": "Cobot_Magic_take_out_the_bread", - "dataset_name": "take_out_the_bread", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "use the right arm to put a slice of bread into the plate", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_machine", - "level1": "kitchenware", - "level2": "bread_machine", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - 
"object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-333147", - "dataset_size": "15.1GB", - "statistics": { - "total_episodes": 588, - "total_frames": 333147, - "total_tasks": 6, - "total_videos": 1764, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "2fb93e01-1b87-444e-8fef-493ab51a3c3f", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "use the right arm to put a slice of bread into the plate", - "use the left arm to put a slice of bread into the empty plate", - "use the left arm to take out a slice of bread", - "use the right arm to take out a slice of bread", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, 
Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_take_out_the_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_take_out_the_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Cobot_Magic_mobile_cube": { - "path": "Cobot_Magic_mobile_cube", - "dataset_name": "mobile_cube", + "leju_robot_hotel_services_aa": { + "path": "leju_robot_hotel_services_aa", + "dataset_name": "hotel_services_aa", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ @@ -88047,224 +98306,43 @@ "level5": null }, { - "object_name": "cube_block", - "level1": "toy", - "level2": "cube_block", - "level3": null, - 
"level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-21487", - "dataset_size": "347.9MB", - "statistics": { - "total_episodes": 99, - "total_frames": 21487, - "total_tasks": 1, - "total_videos": 297, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "ec621804-bf42-4f9d-971a-bbe5cb197144", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Pick up the small cube", - "Place the small cube on the ahead of the table", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo 
Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_mobile_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_mobile_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_stack_bowls": { - "path": "G1edu-u3_stack_bowls", - "dataset_name": "stack_bowls", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the bowl on the right onto the bowl in the middle with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-166122", - "dataset_size": "7.0GB", - "statistics": { - "total_episodes": 261, - "total_frames": 166122, - "total_tasks": 2, - "total_videos": 783, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - 
"dataset_uuid": "d27ed137-4947-4948-b818-a02094cab254", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the bowl on the right onto the bowl in the middle with right gripper", - "Grasp the bowl on the right with the right gripper", - "Grasp the bowl on the left with the left gripper", - "End", - "Static", - "Abnormal", - "Place the bowl on the left onto the bowl in the middle with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu 
Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_stack_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_stack_bowls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_fold_shorts": { - "path": "RMC-AIDA-L_fold_shorts", - "dataset_name": "fold_shorts", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "fold", - "place" - ], - "tasks": "Grab the lower left pant leg with your left hand.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "card", + "level1": "nfc", + "level2": "card", "level3": null, "level4": null, "level5": null }, { - "object_name": "shorts", - "level1": "clothing", - "level2": "shorts", + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-730046", - "dataset_size": "9.5GB", + "operation_platform_height": null, + "frame_range": "0-70570", + "dataset_size": "4.5GB", "statistics": { - "total_episodes": 866, - "total_frames": 730046, - "total_tasks": 4, - "total_videos": 2598, + "total_episodes": 419, + 
"total_frames": 70570, + "total_tasks": 1, + "total_videos": 1257, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5f973d3e-d7a6-40c1-8ecc-0a4a158505f7", + "dataset_uuid": "dcd0025f-866e-4a2c-8898-6bf7dc424bc2", "language": [ "en", "zh" @@ -88273,34 +98351,11 @@ "robotics" ], "sub_tasks": [ - "Grab the lower left pant leg with your left hand.", - "Grab the lower right pant leg with your right hand.", - "Fold the bottom of the shorts upward with right gripper", - "Fold the pants from right to left with the right gripper", - "Hold the waistband with your left hand.", - "Hold the lower right waistband with your right hand.", - "Grab the waistband with your right hand.", - "Fold to the right with your left hand.", - "Press the middle of the pants with your left hand.", - "Fold upward with your left hand.", - "Fold to the left with your right hand.", - "Hold the lower left waistband with your left hand.", - "Place the folded trousers onto the center area with the right grippers", - "Place the folded trousers onto the center area with right gripper", - "Place the folded trousers onto the center area with the left grippers", - "Grasp the right lower side of the waistband of the shorts with right gripper", - "end", - "Anomaly detected.", - "Grasp the lower left leg of the shorts with left gripper", - "Fold the shorts from right to left with right gripper", - "abnormal", - "Fold the pants from left to right with the left gripper", - "Fold the bottom of the shorts upward with left gripper", - "Fold the bottom of the pants upward with both grippers", - "Adjust the pants with your right hand.", - "Fold upward with your right hand.", - "Adjust the pants with your left hand.", - "Press the middle of the pants with your right hand.", + "End", + "Take out the room card with right gripper", + "Hand the room card to the guest with right gripper", + "Hand the room card to the target.", + "Pick up the room card from the card holder.", "null" ], "annotations": { 
@@ -88338,12 +98393,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_fold_shorts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_fold_shorts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_hotel_services_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n 
├── episode_000004.mp4\n └── (...)" }, - "G1edu-u3_place_bottle_c": { - "path": "G1edu-u3_place_bottle_c", - "dataset_name": "place_bottle_c", + "G1edu-u3_pick_apple_b": { + "path": "G1edu-u3_pick_apple_b", + "dataset_name": "pick_apple_b", "robot_type": "", "end_effector_type": [ "three_finger_hand" @@ -88351,7 +98406,7 @@ "scene_type": [], "atomic_actions": [ "grasp", - "place" + "pick" ], "tasks": "End", "objects": [ @@ -88364,139 +98419,27 @@ "level5": null }, { - "object_name": "bottle", - "level1": "container", - "level2": "bottle", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-7277", - "dataset_size": "92.3MB", - "statistics": { - "total_episodes": 29, - "total_frames": 7277, - "total_tasks": 1, - "total_videos": 29, - "total_chunks": 1, - "chunks_size": 30, - "fps": 30 - }, - "dataset_uuid": "5e6d1031-7c1e-4cd5-a371-1682b697fabd", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Place the water bottle on the table with left gripper", - "Place the water bottle on the table with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": 
"@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_place_bottle_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_place_bottle_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_hang_clothes": { - "path": "R1_Lite_hang_clothes", - "dataset_name": "hang_clothes", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Pick up the clothes rack on the clothesline with the right gripper", - "objects": [ - { - "object_name": "clothesline", - "level1": "tool", - "level2": "clothesline", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "clothes_hanger", - "level1": "daily_necessities", - "level2": "clothes_hanger", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "clothes", - "level1": "fabric", - "level2": "clothes", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", + "object_name": "apple", + "level1": "fruit", + "level2": "apple", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-289827", - "dataset_size": "22.8GB", + "frame_range": "0-8289", + "dataset_size": "102.4MB", "statistics": { - 
"total_episodes": 109, - "total_frames": 289827, + "total_episodes": 26, + "total_frames": 8289, "total_tasks": 1, - "total_videos": 327, + "total_videos": 26, "total_chunks": 1, - "chunks_size": 1000, + "chunks_size": 26, "fps": 30 }, - "dataset_uuid": "f7bbf41d-af0d-4f31-9158-8e7987fb7c9f", + "dataset_uuid": "5cb12579-7559-4cdb-b6c3-8ffb1f039b19", "language": [ "en", "zh" @@ -88505,20 +98448,9 @@ "robotics" ], "sub_tasks": [ - "Pick up the clothes rack on the clothesline with the right gripper", - "Hang the clothes rack on the clothesline with the right gripper", - "Take the clothes off the clothes rack with the right gripper", "End", - "Hang the clothes on the clothes rack", - "Put clothes in the basket with the left gripper", - "Pick up the clothes rack on the clothesline", - "Hang the clothes rack on the clothesline", - "Pick up clothes from the basket", - "Take the clothes off the clothes rack", - "Hang the clothes on the clothes rack with both grippers", - "Pick up clothes from the basket with the left gripper", - "abnormal", - "Put clothes in the basket", + "Grasp the apple and lift it to the center of the view with right gripper", + "Grasp the apple and lift it to the center of the view with left gripper", "null" ], "annotations": { @@ -88556,12 +98488,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian 
Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_hang_clothes_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_hang_clothes_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_pick_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "leju_robot_hotel_services_i": { - "path": "leju_robot_hotel_services_i", - "dataset_name": "hotel_services_i", + "leju_robot_hotel_services_ae": { + "path": "leju_robot_hotel_services_ae", + "dataset_name": "hotel_services_ae", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -88572,7 +98504,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Pick up the key card from the card box.", "objects": [ { "object_name": "table", @@ -88608,124 +98540,18 @@ } ], "operation_platform_height": null, - "frame_range": "0-33547", - "dataset_size": "2.1GB", - "statistics": { - "total_episodes": 189, - "total_frames": 33547, - "total_tasks": 1, - "total_videos": 567, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "fc45ac73-e09b-491e-9dc0-71c8c532f0a7", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Take out the room card with right gripper", - "Hand the room card to the guest with right gripper", - "Hand the room card to the target.", - "Pick up the room card from the card holder.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": 
"RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── 
data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_i_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_place_the_yellow_block": { - "path": "AIRBOT_MMK2_place_the_yellow_block", - "dataset_name": "place_the_yellow_block", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "lower" - ], - "tasks": "Static", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square_block", - "level1": "toy", - "level2": "square_block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-10030", - "dataset_size": "501.4MB", + "frame_range": "0-78394", + "dataset_size": "5.0GB", "statistics": { - "total_episodes": 47, - "total_frames": 10030, + "total_episodes": 449, + "total_frames": 78394, "total_tasks": 1, - "total_videos": 188, + "total_videos": 1347, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5627f996-6a84-4ee5-942f-6a546d68645f", + "dataset_uuid": "91537f29-b5ac-455f-bfdf-78d671e6f66d", "language": [ "en", "zh" @@ -88734,13 +98560,8 @@ "robotics" ], "sub_tasks": [ - "Static", - "End", - "Abnormal", - "Grasp the yellow block with left gripper", - "Grasp the yellow block on the magic cube with right gripper", - "Place the yellow block on the magic cube with left gripper", - "Place the yellow block on the table with right gripper", + "Pick up the key card from the card box.", + "Hand the key card to the target.", "null" ], "annotations": { @@ -88778,10 +98599,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, 
Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_yellow_block_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ 
├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_yellow_block_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_classify_objects_six": { + "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth": { "task_categories": [ "robotics" ], @@ -88811,11 +98632,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_classify_objects_six", + "dataset_name": "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -88831,195 +98652,185 @@ "level5": null }, { - "object_name": "brown_basket", - "level1": "home_storage", - "level2": "brown_basket", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null }, { - "object_name": "black_basket", + "object_name": "banana", "level1": "food", - "level2": "black_basket", + "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "bread", + "object_name": "long_bread", "level1": "food", - "level2": "bread", + "level2": "long_bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "orange", + "object_name": "milk", "level1": "food", - "level2": "orange", + "level2": "milk", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_lemon", + "object_name": "yogurt", "level1": "food", - "level2": "green_lemon", + "level2": "yogurt", "level3": null, "level4": null, "level5": null }, { - "object_name": "pink_clear_plastic_cup", - "level1": "kitchen_supplies", - "level2": "pink_clear_plastic_cup", + "object_name": "grape", + "level1": "food", + "level2": "grape", "level3": 
null, "level4": null, "level5": null }, { - "object_name": "laundry_detergent", - "level1": "daily_necessities", - "level2": "laundry_detergent", + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", "level3": null, "level4": null, "level5": null }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_necessities", - "level2": "Mmentholatum_facial_cleanser", + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", "level3": null, "level4": null, "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "place multiple objects separately in different baskets." - ], - "sub_tasks": [ - { - "subtask": "Place the orange in the light basket with left gripper", - "subtask_index": 0 - }, - { - "subtask": "Grasp the xx with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Pick up the facial cleanser with left gripper", - "subtask_index": 2 - }, - { - "subtask": "End", - "subtask_index": 3 - }, - { - "subtask": "Place the facial cleanser in the dark basket with left gripper", - "subtask_index": 4 - }, - { - "subtask": "Place the XX into the basket on the left with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "Place the lime in the light basket with right gripper", - "subtask_index": 6 - }, - { - "subtask": "Place the laundry detergent in the dark basket with right gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the orange in the light basket with right gripper", - "subtask_index": 8 - }, - { - "subtask": "Pick up the lime with left gripper", - "subtask_index": 9 }, { - "subtask": "Place the XX into the basket on the right with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Place the XX into the basket on the left with the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the XX into the basket on the right with the right gripper", 
- "subtask_index": 12 + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the xx with the left gripper", - "subtask_index": 13 + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the orange with left gripper", - "subtask_index": 14 + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the bread in the light basket with right gripper", - "subtask_index": 15 + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the laundry detergent with right gripper", - "subtask_index": 16 + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Abnormal", - "subtask_index": 17 + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the laundry detergent with left gripper", - "subtask_index": 18 + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the facial cleanser with right gripper", - "subtask_index": 19 + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the lime with right gripper", - "subtask_index": 20 - }, + "object_name": "black_table_cloths", + "level1": "laboratory_supplies", + "level2": "black_table_cloths", + "level3": null, + "level4": null, + "level5": null + } + ], 
+ "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." + ], + "sub_tasks": [ { - "subtask": "Pick up the bread with right gripper", - "subtask_index": 21 + "subtask": "Grasp the XX with the right gripper", + "subtask_index": 0 }, { - "subtask": "Place the brown cup in the dark basket with left gripper", - "subtask_index": 22 + "subtask": "Place the XX on the table with the left gripper", + "subtask_index": 1 }, { - "subtask": "Place the laundry detergent in the dark basket with left gripper", - "subtask_index": 23 + "subtask": "Place the XX on the table with the right gripper", + "subtask_index": 2 }, { - "subtask": "Pick up the orange with right gripper", - "subtask_index": 24 + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 3 }, { - "subtask": "Place the lime in the light basket with left gripper", - "subtask_index": 25 + "subtask": "End", + "subtask_index": 4 }, { - "subtask": "Pick up the brown cup with left gripper", - "subtask_index": 26 + "subtask": "Pass the xx to the right gripper", + "subtask_index": 5 }, { "subtask": "null", - "subtask_index": 27 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", "lift", - "lower" + "lower", + "handover", + "takeover" ], "robot_name": [ - "Agilex_Cobot_Magic" + "agilex_cobot_magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", @@ -89048,23 +98859,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 199, - "total_frames": 302506, + "total_episodes": 98, + "total_frames": 60078, "fps": 30, - "total_tasks": 28, - "total_videos": 597, + "total_tasks": 7, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - 
"dataset_size": "3.88 GB" + "dataset_size": "1.18 GB" }, - "frame_num": 302506, - "dataset_size": "3.88 GB", - "data_structure": "Agilex_Cobot_Magic_classify_objects_six_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (187 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 60078, + "dataset_size": "1.18 GB", + "data_structure": "Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:198" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -89435,12 +99246,12 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "leju_robot_box_storage_parcel_b": { - "path": "leju_robot_box_storage_parcel_b", - "dataset_name": "box_storage_parcel_b", + "Cobot_Magic_clear_the_desktop": { + "path": "Cobot_Magic_clear_the_desktop", + "dataset_name": "clear_the_desktop", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -89448,54 +99259,46 @@ "pick", "place" ], - "tasks": "Place the package into the parcel locker.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", + "tasks": "Place the tablecloth", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "parcel", - "level1": "container", - "level2": "parcel", + "object_name": "rag", + "level1": "clothing", + "level2": "rag", "level3": null, "level4": null, "level5": null }, { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", + "object_name": "coffee_stains", + "level1": "garbage", + "level2": "coffee_stains", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-186311", - 
"dataset_size": "9.5GB", + "operation_platform_height": 77.2, + "frame_range": "0-157415", + "dataset_size": "1018.3MB", "statistics": { - "total_episodes": 497, - "total_frames": 186311, - "total_tasks": 1, - "total_videos": 1491, + "total_episodes": 293, + "total_frames": 157415, + "total_tasks": 3, + "total_videos": 879, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "5e55ad50-424b-4a78-9d00-e0d7a45f48f7", + "dataset_uuid": "1ef1c2b2-61c3-459c-a45b-332abc0bdb3e", "language": [ "en", "zh" @@ -89504,10 +99307,13 @@ "robotics" ], "sub_tasks": [ - "Place the package into the parcel locker.", - "Pick up the package from the inbound machine.", - "Pick up the package from the conveyor belt.", - "Place the package onto the inbound machine.", + "Place the tablecloth", + "Sweep the stains on the table", + "End", + "Sweep the stains on the table with the right gripper", + "Place the tablecloth with the right gripper", + "Pick up the tablecloth", + "Pick up the tablecloth with the right gripper", "null" ], "annotations": { @@ -89545,10 +99351,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, 
Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ 
├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_clear_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"Cobot_Magic_clear_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_swap_sponge_paper_box_plate": { + "Airbot_MMK2_swap_apple_cake_plate": { "task_categories": [ "robotics" ], @@ -89578,7 +99384,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_sponge_paper_box_plate", + "dataset_name": "Airbot_MMK2_swap_apple_cake_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -89590,25 +99396,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "paper_boxes", - "level1": "home_storage", - "level2": "paper_boxes", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null }, { - "object_name": "cleaning_sponge", - "level1": "daily_necessities", - "level2": 
"cleaning_sponge", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "apple", + "level1": "fruit", + "level2": "apple", "level3": null, "level4": null, "level5": null @@ -89616,40 +99422,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the sponge out of the plate with left hand and put the box in with right hand." + "take the cake out of the plate and put the apples in." ], "sub_tasks": [ { - "subtask": "Place the sponge on the table with the left gripper", + "subtask": "Grasp the cake placed into the plate with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the mouse box with the right gripper", + "subtask": "Grasp the apple with the right gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Place the cake on the table with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the mouse box into the plate with the right gripper", + "subtask": "Place the apple into the plate with the right gripper", "subtask_index": 4 }, - { - "subtask": "End", - "subtask_index": 5 - }, - { - "subtask": "Grasp the sponge in the plate with the left gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 5 } ], "atomic_actions": [ @@ -89689,23 +99487,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 8731, + "total_episodes": 47, + "total_frames": 6775, "fps": 30, - "total_tasks": 8, - "total_videos": 200, + "total_tasks": 6, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "272.24 MB" + "dataset_size": "182.07 MB" }, - 
"frame_num": 8731, - "dataset_size": "272.24 MB", - "data_structure": "Airbot_MMK2_swap_sponge_paper_box_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 6775, + "dataset_size": "182.07 MB", + "data_structure": "Airbot_MMK2_swap_apple_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -90059,114 +99857,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_storage_spoon": { - "path": "AIRBOT_MMK2_storage_spoon", - "dataset_name": "storage_spoon", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Static", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basin", - "level1": "container", - "level2": "basin", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-12204", - "dataset_size": "363.2MB", - "statistics": { - "total_episodes": 49, - "total_frames": 12204, - "total_tasks": 1, - "total_videos": 196, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "d4290af8-7a4d-4868-8a9b-2962225648e8", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Static", - "Grasp the spoon with the right gripper", - "End", - "Grasp the spoon with the left gripper", - "Place the spoon into the basin with the right gripper", - "Place the spoon into the basin with the left gripper", - "null" - ], - "annotations": { - 
"subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint 
arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_storage_spoon_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_storage_spoon_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ 
├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_take_cup": { + "Agilex_Cobot_Magic_move_mouse_pen": { "task_categories": [ "robotics" ], @@ -90196,11 +99887,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_cup", + "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "catering", - "level2": "cafe", + "level1": "office & workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -90208,17 +99899,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cup", - "level1": "cups", - "level2": "cup", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "storage_utensils", - "level2": "lid", + "object_name": "deli_water-based_marker", + "level1": "stationery", + "level2": "deli_water-based_marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "notebook", + "level1": "stationery", + "level2": "notebook", + "level3": null, + "level4": 
null, + "level5": null + }, + { + "object_name": "mouse", + "level1": "appliances", + "level2": "mouse", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mouse_pad", + "level1": "appliances", + "level2": "mouse_pad", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", "level3": null, "level4": null, "level5": null @@ -90226,62 +99949,75 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the cup off the white lid and place them on the table by hands." + "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." ], "sub_tasks": [ { - "subtask": "place the cup in the table use the left gripper", + "subtask": "Grasp the mouse with the left gripper", "subtask_index": 0 }, { - "subtask": "place the cup in the table use the right gripper", + "subtask": "Grasp the marker with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the cup the left gripper", + "subtask": "Grasp the mouse with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the cup the right gripper", + "subtask": "Place the mouse on the mouse pad with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the marker on the notebook with the right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the marker with the right gripper", "subtask_index": 5 + }, + { + "subtask": "end", + "subtask_index": 6 + }, + { + "subtask": "Place the mouse on the mouse pad with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the marker on the notebook with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "null", + "subtask_index": 9 } ], "atomic_actions": [ - "pinch", - "place", - "clip", - "takeout" + "grasp", + 
"lift", + "lower" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -90292,23 +100028,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 35, - "total_frames": 5435, + "total_episodes": 30, + "total_frames": 22756, "fps": 30, - "total_tasks": 6, - "total_videos": 140, + "total_tasks": 10, + "total_videos": 90, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "169.73 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "276.67 MB" }, - "frame_num": 5435, - "dataset_size": "169.73 MB", - "data_structure": "Airbot_MMK2_take_cup_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (23 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 22756, + "dataset_size": "276.67 MB", + "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:34" + "train": "0:29" }, "features": { "observation.images.cam_head_rgb": { @@ -90380,33 +100116,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -90415,42 +100128,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -90459,36 +100162,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -90620,54 +100313,689 @@ ], "dtype": "int32" }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" 
+ ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, 
Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "leju_robot_moving_parts_h": { + "path": "leju_robot_moving_parts_h", + "dataset_name": "moving_parts_h", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the gray part with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + 
"level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-234391", + "dataset_size": "14.5GB", + "statistics": { + "total_episodes": 162, + "total_frames": 234391, + "total_tasks": 1, + "total_videos": 486, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "27a39264-c303-45a1-9046-1419479116fb", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the gray part with right gripper", + "Insert the small component into the corresponding slot on the workbench.", + "Move to the table behind body", + "Grasp the white part with right gripper", + "Abnormal", + "Move the small component to the workbench.", + "Place the gray part on the table with right gripper", + "Place the white part on the table with right gripper", + "End", + "Pick up the small component from the shelf.", + "Return to the initial position at the workbench.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen 
Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_beauty_sponge_and_cake_to_place": { + "path": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place", + "dataset_name": "beauty_sponge_and_cake_to_place", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Static", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beauty_blender", + "level1": "tools", + 
"level2": "beauty_blender", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-8096", + "dataset_size": "394.9MB", + "statistics": { + "total_episodes": 50, + "total_frames": 8096, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c4b8c5b1-7a57-48f3-9074-c2a019dbfd16", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Static", + "End", + "Grasp the bullet into the plate with the left gripper", + "Grasp the cake on the table with the right gripper", + "Place the bullet on the table with the left gripper", + "Abnormal", + "Place the cake into the plate with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n 
author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_pick_up_the_bottled_water_a": { + "path": 
"G1edu-u3_pick_up_the_bottled_water_a", + "dataset_name": "pick_up_the_bottled_water_a", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "Grasp the water bottle and lift it to the center of the view with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bottle", + "level1": "container", + "level2": "bottle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water", + "level1": "beverages", + "level2": "water", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-12057", + "dataset_size": "153.0MB", + "statistics": { + "total_episodes": 29, + "total_frames": 12057, + "total_tasks": 1, + "total_videos": 29, + "total_chunks": 1, + "chunks_size": 29, + "fps": 30 + }, + "dataset_uuid": "e56a77d6-dbe5-4d31-a397-61ccb616903c", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the water bottle and lift it to the center of the view with right gripper", + "Grasp the water bottle and lift it to the center of the view with left gripper", + "End", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": 
"https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_up_the_bottled_water_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_up_the_bottled_water_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_basket_storage_apple": { + "path": "G1edu-u3_basket_storage_apple", + "dataset_name": "basket_storage_apple", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the apple with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "home_storage", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + "level1": "fruit", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-45574", + "dataset_size": "1.5GB", + "statistics": { + "total_episodes": 
88, + "total_frames": 45574, + "total_tasks": 1, + "total_videos": 264, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "ad45e74b-fccc-4f12-9f34-a2312636f61c", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the apple with the left gripper", + "Place the apple into the basket with the left gripper", + "End", + "Static", + "Move the basket to the front of the table with the right gripper", + "Grasp the basket with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, 
Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_basket_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_basket_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_open_and_close_the_freezer_door": { + "path": "R1_Lite_open_and_close_the_freezer_door", + "dataset_name": "open_and_close_the_freezer_door", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "open" + ], + "tasks": "Abnormal", + "objects": [ + { + "object_name": "freezer", + "level1": "furniture", + "level2": "freezer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ceramic_cup", + "level1": "container", + "level2": "ceramic_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - 
"shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "paper_cup", + "level1": "container", + "level2": "paper_cup", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": null, + "frame_range": "0-170369", + "dataset_size": "6.6GB", + "statistics": { + "total_episodes": 93, + "total_frames": 170369, + "total_tasks": 1, + "total_videos": 279, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "d59cd2ec-8eab-40f4-a630-29a9bbc3f309", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Abnormal", + "End", + "Place the ceramic cup on the table", + "Put the yoghurt in the freezer", + "Take the paper cup out of the freezer", + "Place the yoghurt on the table", + "Take the ceramic cup out of the freezer", + "Put the ceramic cup in the freezer", + "Take the yogurt out of the freezer", + "Open the freezer door", + "Put the paper cup in the freezer", + "Put the yogurt in the freezer", + "Place the paper cup on the table", + "Close the freezer door", + "Grasp the yogur", + "Place the yoghurt on the freezer", + "Place the yogurt on the table", + "Take the yoghurt out of the freezer", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": 
"https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - 
"video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_open_and_close_the_freezer_door_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_open_and_close_the_freezer_door_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_mobile_cube_blackboard": { - "path": "Cobot_Magic_mobile_cube_blackboard", - "dataset_name": "mobile_cube_blackboard", + "leju_robot_moving_parts_o": { 
+ "path": "leju_robot_moving_parts_o", + "dataset_name": "moving_parts_o", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ @@ -90675,7 +101003,7 @@ "pick", "place" ], - "tasks": "Place the block at the correct position to assemble the letter S", + "tasks": "Return to the initial position at the shelf", "objects": [ { "object_name": "table", @@ -90686,27 +101014,35 @@ "level5": null }, { - "object_name": "cube_block", - "level1": "toy", - "level2": "cube_block", + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-191398", - "dataset_size": "3.4GB", + "operation_platform_height": null, + "frame_range": "0-745064", + "dataset_size": "51.1GB", "statistics": { - "total_episodes": 100, - "total_frames": 191398, + "total_episodes": 490, + "total_frames": 745064, "total_tasks": 1, - "total_videos": 300, + "total_videos": 1470, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "679f07f9-efb6-4a77-9347-2423da9981cd", + "dataset_uuid": "27cad5ab-323b-4157-acc6-9f7405263110", "language": [ "en", "zh" @@ -90715,10 +101051,14 @@ "robotics" ], "sub_tasks": [ - "Place the block at the correct position to assemble the letter S", - "out of view", + "Return to the initial position at the shelf", + "Grasp the black part with right gripper", "End", - "Pick up the block on the table", + "Pick up the large material from the shelf", + "Place the black part on the table with right gripper", + "Move to the table behind body", + "Move the large material to the workbench", + "Insert the large material into the corresponding slot on the workbench", "null" ], "annotations": { @@ -90756,542 +101096,447 @@ ], "citation_bibtex": 
"@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_mobile_cube_blackboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ 
├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_mobile_cube_blackboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_moving_parts_o_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_o_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" 
}, - "Airbot_MMK2_storage_mango_pomegranate": { + "leju_robot_part_placement": { + "path": "leju_robot_part_placement", + "dataset_name": "part_placement", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Insert the small parts into the corresponding slots on the workbench.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-796570", + "dataset_size": "50.4GB", + "statistics": { + "total_episodes": 538, + "total_frames": 796570, + "total_tasks": 1, + "total_videos": 1614, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "41b426f1-987c-4320-8263-5f89139cf899", + "language": [ + "en", + "zh" + ], "task_categories": [ "robotics" ], - "language": [ - "en" + "sub_tasks": [ + "Insert the small parts into the corresponding slots on the workbench.", + "Move the large parts to the workbench.", + "Pick up the large parts from the shelf.", + "Grasp the black part with right gripper", + "Grasp the gray part with right gripper", + "End", + "Return to the initial position at the workbench.", + "Place the gray part on the table with right gripper", + "Place the black part on the table with right gripper", + "Move to the table behind body", + "Insert the large parts into the corresponding slots on the workbench.", + "Return to the initial position at the shelf.", + "Move the small parts to the workbench.", + "Pick up the small parts from the shelf.", + "null" ], + "annotations": { + "subtask_annotation": 
"auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, 
Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_part_placement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_part_placement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_move_the_ball": { + "path": "Cobot_Magic_move_the_ball", + "dataset_name": "move_the_ball", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_mango_pomegranate", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "kitchen", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Put the picked-up object on the table.", "objects": [ { - "object_name": "pomegranate", - "level1": "fruit", - "level2": "pomegranate", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, 
"level5": null }, { - "object_name": "mango", - "level1": "fruit", - "level2": "mango", + "object_name": "ball", + "level1": "toy", + "level2": "ball", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "pick up the pomegranate with left hand and put it in the storage box, and pick up the mango with right hand and put it in the storage box." + "operation_platform_height": 77.2, + "frame_range": "0-58184", + "dataset_size": "984.3MB", + "statistics": { + "total_episodes": 100, + "total_frames": 58184, + "total_tasks": 1, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "371a2f88-8a95-4044-ba5d-4c287fd9b679", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" ], "sub_tasks": [ + "Put the picked-up object on the table.", + "Grab the rolling spherical object.", + "Grab the spherical object with your left arm.", + "Place the sphere on the white object.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + 
"repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_move_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_move_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "alpha_bot_2_press_the_button_a": { + "path": "alpha_bot_2_press_the_button_a", + "dataset_name": "press_the_button_a", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + 
"atomic_actions": [ + "pressbutton" + ], + "tasks": "End", + "objects": [ { - "subtask": "Place the pomegranate into the left compartment of the storage box with the left gripper", - "subtask_index": 0 - }, - { - "subtask": "Grasp the pomegranate with the left gripper", - "subtask_index": 1 - }, - { - "subtask": "Grasp a mango with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "Static", - "subtask_index": 3 - }, - { - "subtask": "Place the pomegranate into the left compartment of the storage box with the left gripper", - "subtask_index": 4 - }, - { - "subtask": "Grasp a pomegranate with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the mango with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "End", - "subtask_index": 7 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the mango into the right compartment of the storage box with the right gripper", - "subtask_index": 8 + "object_name": "button", + "level1": "toy", + "level2": "button", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "null", - "subtask_index": 9 + "object_name": "water_bottle", + "level1": "beverages", + "level2": "water_bottle", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], + "operation_platform_height": null, + "frame_range": "0-91452", + "dataset_size": "1.9GB", "statistics": { - "total_episodes": 90, - "total_frames": 28506, - "fps": 30, - "total_tasks": 10, - "total_videos": 360, + "total_episodes": 116, + "total_frames": 91452, + "total_tasks": 1, + "total_videos": 464, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "895.99 MB" + "fps": 30 }, - "frame_num": 28506, - "dataset_size": "895.99 MB", - "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_mango_pomegranate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(78 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:89" + "dataset_uuid": "f687e0d9-d736-45a4-89f6-ef7ff90a103a", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Press the button blocked by the bottle with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, 
Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_press_the_button_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_press_the_button_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "alpha_bot_2_recover_after_touching_an_obstacle": { + "path": "alpha_bot_2_recover_after_touching_an_obstacle", + "dataset_name": "recover_after_touching_an_obstacle", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "Touch the bottle with left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - 
"left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - 
"subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "mineral_water", + "level1": "drinks", + "level2": "mineral_water", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "button", + "level1": "toy", + "level2": "button", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": null, + "frame_range": 
"0-128391", + "dataset_size": "3.7GB", + "statistics": { + "total_episodes": 130, + "total_frames": 128391, + "total_tasks": 1, + "total_videos": 520, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "aebc9a40-0a90-4fc3-a705-fbc5c7d23392", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Touch the bottle with left gripper", + "End", + "Touch the bottle with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, 
Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_recover_after_touching_an_obstacle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_recover_after_touching_an_obstacle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AgiBot-g1_box_storage_c": { - "path": "AgiBot-g1_box_storage_c", - "dataset_name": "box_storage_c", + "leju_robot_pass_the_cleaner_a": { + "path": "leju_robot_pass_the_cleaner_a", + "dataset_name": "pass_the_cleaner_a", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "place", - "pick" + "pick", + "place" ], - "tasks": "Pick up the mouse and power cord box.", + "tasks": "Turn the bottle to the front side.", "objects": [ { 
"object_name": "table", @@ -91302,27 +101547,27 @@ "level5": null }, { - "object_name": "box", - "level1": "container", - "level2": "box", + "object_name": "cleaner", + "level1": "daily_necessities", + "level2": "cleaner", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-23361", - "dataset_size": "10.2GB", + "operation_platform_height": null, + "frame_range": "0-169467", + "dataset_size": "8.7GB", "statistics": { - "total_episodes": 39, - "total_frames": 23361, + "total_episodes": 457, + "total_frames": 169467, "total_tasks": 1, - "total_videos": 312, + "total_videos": 1371, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "a55ac5d2-061f-4ff3-9670-41aef4e03037", + "dataset_uuid": "870e2a78-0b89-486a-bed4-12863a5b00d6", "language": [ "en", "zh" @@ -91331,12 +101576,8 @@ "robotics" ], "sub_tasks": [ - "Pick up the mouse and power cord box.", - "Place the mouse and power cord box into the container.", - "End", - "Place the paper box in the another big box", - "Abnormal", - "Grasp the paper box", + "Turn the bottle to the front side.", + "Pick up the bottle from the table.", "null" ], "annotations": { @@ -91374,12 +101615,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang 
Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_pass_the_cleaner_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n 
├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_pass_the_cleaner_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_open_and_close_nightstand_drawer": { - "path": "R1_Lite_open_and_close_nightstand_drawer", - "dataset_name": "open_and_close_nightstand_drawer", + "Galbot_g1_steamer_storage_baozi_a": { + "path": "Galbot_g1_steamer_storage_baozi_a", + "dataset_name": "steamer_storage_baozi_a", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -91388,49 +101629,64 @@ "atomic_actions": [ "grasp", "pick", - "place", - "push" + "place" ], - "tasks": "Open the nightstand drawer", + "tasks": "End", "objects": [ { - "object_name": "nightstand", + "object_name": "table", "level1": "furniture", - "level2": "nightstand", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": 
"remote_control", - "level1": "electric_appliance", - "level2": "remote_control", + "object_name": "baozi", + "level1": "food", + "level2": "baozi", "level3": null, "level4": null, "level5": null }, { - "object_name": "tissue", + "object_name": "steamer", + "level1": "cookware", + "level2": "steamer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pot_lid", "level1": "daily_necessities", - "level2": "tissue", + "level2": "pot_lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-104258", - "dataset_size": "7.1GB", + "frame_range": "0-250217", + "dataset_size": "4.3GB", "statistics": { - "total_episodes": 77, - "total_frames": 104258, + "total_episodes": 266, + "total_frames": 250217, "total_tasks": 1, - "total_videos": 231, + "total_videos": 798, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "f53ef972-f453-4892-bf9c-cd8d3c22762e", + "dataset_uuid": "1e479c19-9f69-42ac-9238-114fcf1a487c", "language": [ "en", "zh" @@ -91439,12 +101695,11 @@ "robotics" ], "sub_tasks": [ - "Open the nightstand drawer", - "Put the remote controls on the table in the drawer one by one", - "Take the remote control out of the drawer one by one and place them on the table", - "Take the tissue out of the drawer one by one and place them on the table", - "Close the nightstand drawer", - "Put the tissue on the table in the drawer one by one", + "End", + "Place the baozi on the steamer with right gripper", + "Place the pot lid on the steamer with left gripper", + "Grasp the pot lid with left gripper", + "Grasp the baozi in the plate with right gripper", "null" ], "annotations": { @@ -91482,10 +101737,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated 
Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_open_and_close_nightstand_drawer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_open_and_close_nightstand_drawer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Galbot_g1_steamer_storage_baozi_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Galbot_g1_steamer_storage_baozi_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_mix_color_small_test_tube": { + "Airbot_MMK2_unplug": { "task_categories": [ 
"robotics" ], @@ -91515,7 +101770,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_color_small_test_tube", + "dataset_name": "Airbot_MMK2_unplug", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "office_workspace", @@ -91527,41 +101782,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "small_test_tubes", - "level1": "laboratory_supplies", - "level2": "small_test_tubes", + "object_name": "power_strips", + "level1": "appliances", + "level2": "power_strips", "level3": null, "level4": null, "level5": null }, { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", + "object_name": "charger", + "level1": "appliances", + "level2": "charger", "level3": null, "level4": null, "level5": null @@ -91569,96 +101800,56 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the test tube with red pigment and the test tube with blue pigment by grippers and pour them into the container." + "the left hand secures the power strip, while the right hand unplugs the charger and sets it down." 
], "sub_tasks": [ { - "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", + "subtask": "Release socket with the left hand", "subtask_index": 1 }, { - "subtask": "Pour the yellow reagent into the graduated cylinder with the left gripper", + "subtask": "Put down plug with the right hand", "subtask_index": 2 }, { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask": "Press and hold the socket with the left hand", "subtask_index": 3 }, { - "subtask": "Grasp the yellow reagent with the left gripper", + "subtask": "Unplug plug with the right hand", "subtask_index": 4 }, - { - "subtask": "Grasp the blue reagent with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "End", - "subtask_index": 6 - }, - { - "subtask": "Pour the blue reagent into the graduated cylinder with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the test tube into the brown cup with the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the yellow reagent with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the blue reagent with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the test tube into the brown cup with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the red reagent with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Grasp the red reagent with the right gripper", - "subtask_index": 14 - }, { "subtask": "null", - "subtask_index": 15 + "subtask_index": 5 } ], "atomic_actions": [ - "grasp", - "pick", + "uncap", + "presss", "place", - "pour" + "pick" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": 
"two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -91675,30 +101866,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 203, - "total_frames": 147521, + "total_episodes": 90, + "total_frames": 21487, "fps": 30, - "total_tasks": 16, - "total_videos": 812, + "total_tasks": 6, + "total_videos": 360, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "6.71 GB" + "dataset_size": "870.71 MB" }, - "frame_num": 147521, - "dataset_size": "6.71 GB", - "data_structure": "Galaxea_R1_Lite_mix_color_small_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (191 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 21487, + "dataset_size": "870.71 MB", + "data_structure": "Airbot_MMK2_unplug_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(78 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:202" + "train": "0:89" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -91707,8 +101898,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -91717,11 +101908,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -91730,8 +101921,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -91740,11 +101931,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -91753,8 +101944,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -91763,11 +101954,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -91776,8 +101967,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", 
"video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -91789,7 +101980,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -91804,14 +101995,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -91826,8 +102039,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -91978,66 +102213,6 @@ 2 ], "dtype": "int32" - }, - 
"gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -92059,11 +102234,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "leju_robot_hotel_services_aa": { - "path": "leju_robot_hotel_services_aa", - "dataset_name": "hotel_services_aa", + "G1edu-u3_pullBowl_storage_bread_a": { + "path": "G1edu-u3_pullBowl_storage_bread_a", + "dataset_name": "pullBowl_storage_bread_a", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -92074,7 +102249,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Abnormal", "objects": [ { "object_name": "table", @@ -92085,140 +102260,35 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": "card", + "object_name": "plate", + "level1": 
"kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-70570", - "dataset_size": "4.5GB", - "statistics": { - "total_episodes": 419, - "total_frames": 70570, - "total_tasks": 1, - "total_videos": 1257, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "dcd0025f-866e-4a2c-8898-6bf7dc424bc2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Take out the room card with right gripper", - "Hand the room card to the guest with right gripper", - "Hand the room card to the target.", - "Pick up the room card from the card holder.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, 
Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_aa_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_pick_apple_b": { - "path": "G1edu-u3_pick_apple_b", - "dataset_name": "pick_apple_b", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", + "object_name": "plate_rack", "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", + "level2": "plate_rack", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": 
"0-8289", - "dataset_size": "102.4MB", + "operation_platform_height": 77.2, + "frame_range": "0-126861", + "dataset_size": "2.6GB", "statistics": { - "total_episodes": 26, - "total_frames": 8289, + "total_episodes": 178, + "total_frames": 126861, "total_tasks": 1, - "total_videos": 26, + "total_videos": 534, "total_chunks": 1, - "chunks_size": 26, + "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5cb12579-7559-4cdb-b6c3-8ffb1f039b19", + "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a52", "language": [ "en", "zh" @@ -92227,9 +102297,13 @@ "robotics" ], "sub_tasks": [ + "Abnormal", + "Place the long bread in pink bowl with left hand", "End", - "Grasp the apple and lift it to the center of the view with right gripper", - "Grasp the apple and lift it to the center of the view with left gripper", + "Grasp the round bread with left hand", + "Grasp the long bread with left hand", + "Place the round bread in pink bowl with left hand", + "Move the pink bowl to the center of table with right hand", "null" ], "annotations": { @@ -92267,23 +102341,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, 
Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pick_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_apple_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_pullBowl_storage_bread_a_qced_hardlink/\n├── 
annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pullBowl_storage_bread_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "leju_robot_hotel_services_ae": { - "path": "leju_robot_hotel_services_ae", - "dataset_name": "hotel_services_ae", + "R1_Lite_move_the_position_of_the_brush": { + "path": "R1_Lite_move_the_position_of_the_brush", + "dataset_name": "move_the_position_of_the_brush", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", + "place", "pick", - "place" + "grasp" ], - "tasks": "Pick up the key card from the card box.", + "tasks": "Static", "objects": [ { "object_name": "table", @@ -92294,790 +102368,355 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "card", - "level1": "nfc", - "level2": "card", + "object_name": "banana", + "level1": "fruit", + "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-78394", - "dataset_size": "5.0GB", - "statistics": { - "total_episodes": 449, - "total_frames": 78394, - "total_tasks": 1, - "total_videos": 1347, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "91537f29-b5ac-455f-bfdf-78d671e6f66d", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Pick up the key card from the card box.", - "Hand the key card to the target.", - "null" 
- ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n 
journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - 
"object_name": "ambrosial_yogurt", - "level1": "food", - "level2": "ambrosial_yogurt", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "banana", - "level1": "food", - "level2": "banana", + "object_name": "can", + "level1": "container", + "level2": "can", "level3": null, "level4": null, "level5": null }, { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", "level3": null, "level4": null, "level5": null }, { - "object_name": "milk", - "level1": "food", - "level2": "milk", + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "yogurt", + "object_name": "long_bread", "level1": "food", - "level2": "yogurt", + "level2": "long_bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "grape", - "level1": "food", - "level2": "grape", + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", "level3": null, "level4": null, "level5": null }, { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", + "object_name": "block", + "level1": "toy", + "level2": "block", "level3": null, "level4": null, "level5": null }, { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", + "object_name": "duck", + "level1": "toy", + "level2": "duck", "level3": null, "level4": null, "level5": null }, { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", "level3": null, "level4": null, "level5": null }, { - "object_name": "eyeglass_case", - "level1": "laboratory_supplies", - "level2": "eyeglass_case", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null }, { 
- "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", + "object_name": "basket", + "level1": "container", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", + "object_name": "cola", + "level1": "drink", + "level2": "cola", "level3": null, "level4": null, "level5": null }, { - "object_name": "cleanser", + "object_name": "detergent", "level1": "daily_necessities", - "level2": "cleanser", + "level2": "detergent", "level3": null, "level4": null, "level5": null }, { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", "level3": null, "level4": null, "level5": null }, { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", + "object_name": "towel", + "level1": "clothing", + "level2": "towel", "level3": null, "level4": null, "level5": null }, { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", + "object_name": "orange", + "level1": "fruit", + "level2": "orange", "level3": null, "level4": null, "level5": null }, { - "object_name": "black_table_cloths", - "level1": "laboratory_supplies", - "level2": "black_table_cloths", + "object_name": "peach", + "level1": "fruit", + "level2": "peach", "level3": null, "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." 
- ], - "sub_tasks": [ - { - "subtask": "Grasp the XX with the right gripper", - "subtask_index": 0 - }, - { - "subtask": "Place the XX on the table with the left gripper", - "subtask_index": 1 - }, - { - "subtask": "Place the XX on the table with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "Grasp the XX with the left gripper", - "subtask_index": 3 - }, - { - "subtask": "End", - "subtask_index": 4 - }, - { - "subtask": "Pass the xx to the right gripper", - "subtask_index": 5 - }, - { - "subtask": "null", - "subtask_index": 6 - } - ], - "atomic_actions": [ - "grasp", - "lift", - "lower", - "handover", - "takeover" - ], - "robot_name": [ - "agilex_cobot_magic" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 98, - "total_frames": 60078, - "fps": 30, - "total_tasks": 7, - "total_videos": 294, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "1.18 GB" - }, - "frame_num": 60078, - 
"dataset_size": "1.18 GB", - "data_structure": "Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:97" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - 
"height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + "level5": null }, - "episode_index": { - 
"dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null }, - "subtask_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 5 - ] + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null }, - "scene_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 1 - ] + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] + { + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null 
}, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - 
"shape": [ - 2 - ] + { + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + { + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, 
Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "Cobot_Magic_clear_the_desktop": { - "path": "Cobot_Magic_clear_the_desktop", - "dataset_name": "clear_the_desktop", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the tablecloth", - "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", 
"level3": null, "level4": null, "level5": null }, { - "object_name": "rag", - "level1": "clothing", - "level2": "rag", + "object_name": "cookie", + "level1": "food", + "level2": "cookie", "level3": null, "level4": null, "level5": null }, { - "object_name": "coffee_stains", - "level1": "garbage", - "level2": "coffee_stains", + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-157415", - "dataset_size": "1018.3MB", + "frame_range": "0-17774", + "dataset_size": "677.0MB", "statistics": { - "total_episodes": 293, - "total_frames": 157415, - "total_tasks": 3, - "total_videos": 879, + "total_episodes": 63, + "total_frames": 17774, + "total_tasks": 1, + "total_videos": 252, "total_chunks": 1, "chunks_size": 1000, - "fps": 50 + "fps": 30 }, - "dataset_uuid": "1ef1c2b2-61c3-459c-a45b-332abc0bdb3e", + "dataset_uuid": "0626d7f7-69b7-4fb0-9edc-8ba92d1df7cf", "language": [ "en", "zh" @@ -93086,13 +102725,13 @@ "robotics" ], "sub_tasks": [ - "Place the tablecloth", - "Sweep the stains on the table", + "Static", + "Place the brush on the table with right gripper", + "Grasp the brush with right gripper", + "Grasp the brush with left gripper", + "Place the brush on the table with left gripper", "End", - "Sweep the stains on the table with the right gripper", - "Place the tablecloth with the right gripper", - "Pick up the tablecloth", - "Pick up the tablecloth with the right gripper", + "Abnormal", "null" ], "annotations": { @@ -93130,10 +102769,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, 
Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_clear_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── 
episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_clear_the_desktop_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_move_the_position_of_the_brush_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ 
├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_brush_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n 
└── (...)" }, - "Airbot_MMK2_swap_apple_cake_plate": { + "Galaxea_R1_Lite_pour_solid": { "task_categories": [ "robotics" ], @@ -93163,10 +102802,10 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_apple_cake_plate", + "dataset_name": "Galaxea_R1_Lite_pour_solid", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", + "level1": "ousehhold", "level2": "kitchen", "level3": null, "level4": null, @@ -93175,25 +102814,41 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "white_table", + "level1": "furniture", + "level2": "white_table", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "plastic_cup", + "level1": "cups", + "level2": "plastic_cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", + "object_name": "green_dish", + "level1": "plates", + "level2": "green_dish", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_bowl", + "level1": "plastic_bowls", + "level2": "pink_bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "solid", + "level1": "materials", + "level2": "solid", "level3": null, "level4": null, "level5": null @@ -93201,55 +102856,172 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the cake out of the plate and put the apples in." + "use a gripper to pick up the cup and pour the solid into a bowl or tray." 
], "sub_tasks": [ { - "subtask": "Grasp the cake placed into the plate with the left gripper", + "subtask": "Grasp the glass of shrimp with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the apple with the right gripper", + "subtask": "Pick up blue cup filled with coffee beans with left gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place blue cup with coffee beans on the table with left gripper", "subtask_index": 2 }, { - "subtask": "Place the cake on the table with the left gripper", + "subtask": "Pour the shrimp into the green bowl with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the apple into the plate with the right gripper", + "subtask": "Pour the coffee beans into the pink bowl with the left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Pour the coffee beans into the pink bowl with the right gripper", "subtask_index": 5 + }, + { + "subtask": "Left gripper", + "subtask_index": 6 + }, + { + "subtask": "Pour the shrimp into the green bowl with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Pick up blue cup filled with shrimp with left gripper", + "subtask_index": 8 + }, + { + "subtask": "Place blue cup with shrimp on the table with left gripper", + "subtask_index": 9 + }, + { + "subtask": "Pick up blue cup filled with coffee beans with right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the glass of coffee beans with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour the shrimp into the pink bowl with left gripper", + "subtask_index": 12 + }, + { + "subtask": "Pour the coffee beans into the green bowl with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Pour the coffee beans into the pink bowl with left gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the glass of coffee beans with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Pour the coffee beans into the green plate with the right 
gripper", + "subtask_index": 16 + }, + { + "subtask": "Pour the shrimp into the pink bowl with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the glass of shrimp with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Pour the coffee beans into the green plate with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Place blue cup with coffee powder on the table with left gripper", + "subtask_index": 20 + }, + { + "subtask": "Place the glass cup with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Place blue cup with coffee beans on the table with right gripper", + "subtask_index": 22 + }, + { + "subtask": "End", + "subtask_index": 23 + }, + { + "subtask": "Pour the coffee beans into the green bowl with the right gripper", + "subtask_index": 24 + }, + { + "subtask": "Place blue cup with shrimp on the table with right gripper", + "subtask_index": 25 + }, + { + "subtask": "Pick up blue cup filled with shrimp with right gripper", + "subtask_index": 26 + }, + { + "subtask": "Pour the shrimp into the pink bowl with right gripper", + "subtask_index": 27 + }, + { + "subtask": "Pour the shrimp into the pink bowl with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Pour the shrimp into the blue basin with left gripper", + "subtask_index": 29 + }, + { + "subtask": "Pour the coffee beans into the blue basin with right gripper", + "subtask_index": 30 + }, + { + "subtask": "Right gripper", + "subtask_index": 31 + }, + { + "subtask": "Pour the shrimp into the blue basin with right gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the glass cup with the left gripper", + "subtask_index": 33 + }, + { + "subtask": "null", + "subtask_index": 34 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some 
reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -93266,30 +103038,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 6775, + "total_episodes": 39, + "total_frames": 16353, "fps": 30, - "total_tasks": 6, - "total_videos": 188, + "total_tasks": 35, + "total_videos": 156, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "182.07 MB" + "dataset_size": "459.77 MB" }, - "frame_num": 6775, - "dataset_size": "182.07 MB", - "data_structure": "Airbot_MMK2_swap_apple_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- 
scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 16353, + "dataset_size": "459.77 MB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_pour_solid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(27 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:46" + "train": "0:38" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -93298,8 +103070,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -93308,11 +103080,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -93321,8 +103093,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -93331,10 +103103,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -93344,7 +103116,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -93354,10 +103126,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -93367,7 +103139,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -93380,7 +103152,7 @@ "observation.state": { 
"dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -93395,36 +103167,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -93439,30 +103189,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -93613,6 +103341,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + 
], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -93634,9 +103422,220 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_move_mouse_pen": { + "R1_Lite_boil_water_in_a_kettle": { + "path": "R1_Lite_boil_water_in_a_kettle", + "dataset_name": "boil_water_in_a_kettle", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Start boiling water", + "objects": [ + { + "object_name": "stove_top", + "level1": "furniture", + "level2": "stove_top", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "boil_the_kettle", + "level1": "electric_appliance", + "level2": "boil_the_kettle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "heating_plate", + "level1": "electric_appliance", + "level2": "heating_plate", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "sink", + "level1": "container", + "level2": "sink", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-85792", + "dataset_size": "3.9GB", + "statistics": { + "total_episodes": 75, + "total_frames": 85792, + "total_tasks": 1, + "total_videos": 225, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "68764373-ee5e-45aa-ad8a-1ca6cf3e0834", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Start boiling water", + "Place it on the power base", + "Finish boiling water", + "abnormal", + "Place it by the sink", + "Pick up the kettle", + "Grab the kettle", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai 
Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_boil_water_in_a_kettle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_boil_water_in_a_kettle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_place_apple_c": { + "path": "G1edu-u3_place_apple_c", + "dataset_name": "place_apple_c", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "pick", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + "level1": "fruit", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-6567", + "dataset_size": "81.6MB", + "statistics": { + "total_episodes": 29, + "total_frames": 6567, + "total_tasks": 1, + "total_videos": 29, + "total_chunks": 1, + "chunks_size": 30, + "fps": 30 
+ }, + "dataset_uuid": "fadd4ee3-65b3-497e-a817-3885814cab0c", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Place the apple on the table with right gripper", + "Place the apple on the table with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, 
Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_place_apple_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_place_apple_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_move_block_both_hands": { 
"task_categories": [ "robotics" ], @@ -93666,11 +103665,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen", + "dataset_name": "Airbot_MMK2_move_block_both_hands", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office & workspace", - "level2": "office", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -93678,49 +103677,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "deli_water-based_marker", - "level1": "stationery", - "level2": "deli_water-based_marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "notebook", - "level1": "stationery", - "level2": "notebook", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mouse", - "level1": "appliances", - "level2": "mouse", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mouse_pad", - "level1": "appliances", - "level2": "mouse_pad", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "white_table_cloths", - "level1": "laboratory_supplies", - "level2": "white_table_cloths", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null @@ -93728,75 +103687,61 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." + "pick up the building blocks with both hands simultaneously and place them on the blocks." 
], "sub_tasks": [ { - "subtask": "Grasp the mouse with the left gripper", + "subtask": "Place the blue block on top of the red block with the left gripper", "subtask_index": 0 }, { - "subtask": "Grasp the marker with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Grasp the mouse with the right gripper", + "subtask": "Grasp the green block with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the mouse on the mouse pad with the right gripper", + "subtask": "Grasp the blue block with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the marker on the notebook with the right gripper", + "subtask": "Place the green block on top of the yellow block with the right gripper", "subtask_index": 4 }, - { - "subtask": "Grasp the marker with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "end", - "subtask_index": 6 - }, - { - "subtask": "Place the mouse on the mouse pad with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the marker on the notebook with the left gripper", - "subtask_index": 8 - }, { "subtask": "null", - "subtask_index": 9 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -93807,23 +103752,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 30, - "total_frames": 22756, + "total_episodes": 54, + "total_frames": 8888, "fps": 30, - "total_tasks": 10, - "total_videos": 90, + "total_tasks": 6, + "total_videos": 216, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "276.67 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "388.10 MB" }, - "frame_num": 22756, - "dataset_size": "276.67 MB", - "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 8888, + "dataset_size": "388.10 MB", + "data_structure": "Airbot_MMK2_move_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:29" + "train": "0:53" }, "features": { "observation.images.cam_head_rgb": { @@ -93895,10 +103840,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -93907,32 +103875,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -93941,26 +103919,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -94111,66 +104099,6 @@ 2 ], "dtype": "int32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - 
"left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] } }, "authors": { @@ -94192,343 +104120,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "leju_robot_moving_parts_h": { - "path": "leju_robot_moving_parts_h", - "dataset_name": "moving_parts_h", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the gray part with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-234391", - "dataset_size": "14.5GB", - "statistics": { - "total_episodes": 162, - "total_frames": 234391, - "total_tasks": 1, - "total_videos": 486, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "27a39264-c303-45a1-9046-1419479116fb", - "language": [ - "en", - "zh" - ], - 
"task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the gray part with right gripper", - "Insert the small component into the corresponding slot on the workbench.", - "Move to the table behind body", - "Grasp the white part with right gripper", - "Abnormal", - "Move the small component to the workbench.", - "Place the gray part on the table with right gripper", - "Place the white part on the table with right gripper", - "End", - "Pick up the small component from the shelf.", - "Return to the initial position at the workbench.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo 
Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_beauty_sponge_and_cake_to_place": { - "path": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place", - "dataset_name": "beauty_sponge_and_cake_to_place", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Static", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "beauty_blender", - "level1": "tools", - "level2": "beauty_blender", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "food", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - 
"frame_range": "0-8096", - "dataset_size": "394.9MB", - "statistics": { - "total_episodes": 50, - "total_frames": 8096, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "c4b8c5b1-7a57-48f3-9074-c2a019dbfd16", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Static", - "End", - "Grasp the bullet into the plate with the left gripper", - "Grasp the cake on the table with the right gripper", - "Place the bullet on the table with the left gripper", - "Abnormal", - "Place the cake into the plate with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie 
Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_beauty_sponge_and_cake_to_place_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_pick_up_the_bottled_water_a": { - "path": "G1edu-u3_pick_up_the_bottled_water_a", - "dataset_name": "pick_up_the_bottled_water_a", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" - ], - "tasks": "Grasp the water bottle and lift it to the center of the view with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - 
"level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bottle", - "level1": "container", - "level2": "bottle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "water", - "level1": "beverages", - "level2": "water", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-12057", - "dataset_size": "153.0MB", - "statistics": { - "total_episodes": 29, - "total_frames": 12057, - "total_tasks": 1, - "total_videos": 29, - "total_chunks": 1, - "chunks_size": 29, - "fps": 30 - }, - "dataset_uuid": "e56a77d6-dbe5-4d31-a397-61ccb616903c", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the water bottle and lift it to the center of the view with right gripper", - "Grasp the water bottle and lift it to the center of the view with left gripper", - "End", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, 
Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pick_up_the_bottled_water_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_up_the_bottled_water_a_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_basket_storage_apple": { - "path": "G1edu-u3_basket_storage_apple", - "dataset_name": "basket_storage_apple", + "G1edu-u3_food_storage": { + "path": "G1edu-u3_food_storage", + "dataset_name": "food_storage", "robot_type": "", "end_effector_type": [ "three_finger_hand" @@ -94539,7 +104135,7 @@ "pick", "place" ], - "tasks": "Grasp the apple with the left gripper", + "tasks": "Grasp the bowl with the left gripper", "objects": [ { "object_name": "table", @@ -94558,494 +104154,51 @@ "level5": null }, { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-45574", - "dataset_size": "1.5GB", - "statistics": { - "total_episodes": 88, - "total_frames": 45574, - "total_tasks": 1, - "total_videos": 264, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "ad45e74b-fccc-4f12-9f34-a2312636f61c", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the apple with the left gripper", - "Place the apple into the basket with the left gripper", - "End", - "Static", - "Move the basket to the front of the 
table with the right gripper", - "Grasp the basket with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, 
Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_basket_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_basket_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_open_and_close_the_freezer_door": { - "path": "R1_Lite_open_and_close_the_freezer_door", - "dataset_name": "open_and_close_the_freezer_door", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "open" - ], - "tasks": "Abnormal", - "objects": [ - { - "object_name": "freezer", - "level1": "furniture", - "level2": "freezer", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ceramic_cup", - "level1": "container", - "level2": "ceramic_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_cup", - "level1": "container", - "level2": "paper_cup", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-170369", - "dataset_size": "6.6GB", - "statistics": { - "total_episodes": 93, - "total_frames": 170369, - "total_tasks": 1, - "total_videos": 279, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "d59cd2ec-8eab-40f4-a630-29a9bbc3f309", - "language": [ - "en", - "zh" - ], - "task_categories": [ - 
"robotics" - ], - "sub_tasks": [ - "Abnormal", - "End", - "Place the ceramic cup on the table", - "Put the yoghurt in the freezer", - "Take the paper cup out of the freezer", - "Place the yoghurt on the table", - "Take the ceramic cup out of the freezer", - "Put the ceramic cup in the freezer", - "Take the yogurt out of the freezer", - "Open the freezer door", - "Put the paper cup in the freezer", - "Put the yogurt in the freezer", - "Place the paper cup on the table", - "Close the freezer door", - "Grasp the yogur", - "Place the yoghurt on the freezer", - "Place the yogurt on the table", - "Take the yoghurt out of the freezer", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, 
Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_open_and_close_the_freezer_door_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"R1_Lite_open_and_close_the_freezer_door_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_moving_parts_o": { - "path": "leju_robot_moving_parts_o", - "dataset_name": "moving_parts_o", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Return to the initial position at the shelf", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-745064", - "dataset_size": 
"51.1GB", - "statistics": { - "total_episodes": 490, - "total_frames": 745064, - "total_tasks": 1, - "total_videos": 1470, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "27cad5ab-323b-4157-acc6-9f7405263110", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Return to the initial position at the shelf", - "Grasp the black part with right gripper", - "End", - "Pick up the large material from the shelf", - "Place the black part on the table with right gripper", - "Move to the table behind body", - "Move the large material to the workbench", - "Insert the large material into the corresponding slot on the workbench", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei 
Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_o_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_o_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_part_placement": { - "path": "leju_robot_part_placement", - "dataset_name": "part_placement", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Insert the small parts into the corresponding slots on the workbench.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - 
"level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-796570", - "dataset_size": "50.4GB", - "statistics": { - "total_episodes": 538, - "total_frames": 796570, - "total_tasks": 1, - "total_videos": 1614, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "41b426f1-987c-4320-8263-5f89139cf899", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Insert the small parts into the corresponding slots on the workbench.", - "Move the large parts to the workbench.", - "Pick up the large parts from the shelf.", - "Grasp the black part with right gripper", - "Grasp the gray part with right gripper", - "End", - "Return to the initial position at the workbench.", - "Place the gray part on the table with right gripper", - "Place the black part on the table with right gripper", - "Move to the table behind body", - "Insert the large parts into the corresponding slots on the workbench.", - "Return to the initial position at the shelf.", - "Move the small parts to the workbench.", - "Pick up the small parts from the shelf.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - 
"citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_part_placement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_part_placement_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_move_the_ball": { - "path": "Cobot_Magic_move_the_ball", - "dataset_name": "move_the_ball", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Put the picked-up object on the table.", - "objects": 
[ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "ball", - "level1": "toy", - "level2": "ball", + "object_name": "donut", + "level1": "food", + "level2": "donut", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-58184", - "dataset_size": "984.3MB", + "frame_range": "0-316377", + "dataset_size": "13.6GB", "statistics": { - "total_episodes": 100, - "total_frames": 58184, + "total_episodes": 190, + "total_frames": 316377, "total_tasks": 1, - "total_videos": 300, + "total_videos": 570, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "371a2f88-8a95-4044-ba5d-4c287fd9b679", + "dataset_uuid": "4f2a72fa-3388-401a-ab17-b437417cce23", "language": [ "en", "zh" @@ -95054,10 +104207,14 @@ "robotics" ], "sub_tasks": [ - "Put the picked-up object on the table.", - "Grab the rolling spherical object.", - "Grab the spherical object with your left arm.", - "Place the sphere on the white object.", + "Grasp the bowl with the left gripper", + "Pour the cake into the basket with the left gripper", + "End", + "Place the bowl in the center of the table with the left gripper", + "Pick up the bread with the right gripper and place it on the plate", + "Place the bowl on the table with the left gripper", + "Push the bowl to the center of the table using the left gripper", + "Abnormal", "null" ], "annotations": { @@ -95095,311 +104252,1035 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, 
Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_move_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_move_the_ball_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── 
tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "alpha_bot_2_press_the_button_a": { - "path": "alpha_bot_2_press_the_button_a", - "dataset_name": "press_the_button_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Galaxea_R1_Lite_storage_object_brown_bowl": 
{ + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "pressbutton" + "language": [ + "en" ], - "tasks": "End", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_brown_bowl", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "brown_bowl", + "level1": "plastic_bowl", + "level2": "brown_bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "button", - "level1": "toy", - "level2": "button", + "object_name": "banana", + "level1": "fruits", + "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "water_bottle", + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + 
}, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + "level1": "beveragesbeverages", + "level2": "coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "rulers", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "building_blocks", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "spoons_and_spatulas", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "erasers", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", "level1": "beverages", - "level2": "water_bottle", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-91452", - "dataset_size": "1.9GB", + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick the target object and place on the brown bowl." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the blue pot with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the coke in the bowl with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the plugboard with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the back scratcher in the bowl with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the plugboard in the bowl with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the compasses in the bowl with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the potato chips with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the banana with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the square chewing gum in the bowl with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the round wooden block in the bowl with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the tin in the bowl with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the potato chips in the bowl with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the chocolate cake in the bowl with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the back scratcher with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the square wooden block in the bowl with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the 
plugboard with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the yogurt with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the tin with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the square wooden block in the bowl with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the potato chips in the bowl with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Place the duck toy in the bowl with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Place the green lemon in the bowl with the right gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 31 + }, + { + "subtask": "Place the peach in the bowl with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the soft facial cleanser in the bowl with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the shower sphere in the bowl with the left gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the banana in the bowl with the right gripper", + "subtask_index": 35 + }, + { + "subtask": "Place the shower sphere in the bowl with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 37 + }, + { + "subtask": "Place the brown towel in the bowl with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "Place the blackboard erasure in the bowl with the left gripper", + "subtask_index": 39 + }, + { + "subtask": "Grasp the potato chips with the 
left gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "End", + "subtask_index": 42 + }, + { + "subtask": "Place the blue cup in the bowl with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Place the round wooden block in the bowl with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Place the banana in the bowl with the left gripper", + "subtask_index": 48 + }, + { + "subtask": "Place the chocolate cake in the bowl with the left gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the tape in the bowl with the right gripper", + "subtask_index": 50 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the chocolate in the bowl with the left gripper", + "subtask_index": 52 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the hard facial cleanser in the bowl with the left gripper", + "subtask_index": 54 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 56 + }, + { + "subtask": "Place the hard facial cleanser in the bowl with the right gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the round bread in the bowl with the right gripper", + "subtask_index": 58 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Place the tape in the bowl with the left gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the coke with the right 
gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the tape with the left gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the duck toy in the bowl with the right gripper", + "subtask_index": 63 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the blue pot in the bowl with the left gripper", + "subtask_index": 65 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Place the plugboard in the bowl with the left gripper", + "subtask_index": 67 + }, + { + "subtask": "Place the coke in the bowl with the right gripper", + "subtask_index": 68 + }, + { + "subtask": "Place the round bread in the bowl with the left gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the blue cup in the bowl with the left gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the blue pot in the bowl with the right gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 72 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the yogurt in the bowl with the right gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 76 + }, + { + "subtask": "Grasp the soft facial cleanser with the right gripper", + "subtask_index": 77 + }, + { + "subtask": "Place the compasses in the bowl with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Place the brown towel in the bowl with the left gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 81 + }, + { + 
"subtask": "Grasp the banana with the right gripper", + "subtask_index": 82 + }, + { + "subtask": "null", + "subtask_index": 83 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], "statistics": { - "total_episodes": 116, - "total_frames": 91452, - "total_tasks": 1, - "total_videos": 464, + "total_episodes": 101, + "total_frames": 23706, + "fps": 30, + "total_tasks": 84, + "total_videos": 404, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "909.34 MB" }, - "dataset_uuid": "f687e0d9-d736-45a4-89f6-ef7ff90a103a", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Press the button blocked by the bottle with right 
gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "frame_num": 23706, + "dataset_size": "909.34 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:100" }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, 
Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "alpha_bot_2_press_the_button_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ 
├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_press_the_button_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "alpha_bot_2_recover_after_touching_an_obstacle": { - "path": "alpha_bot_2_recover_after_touching_an_obstacle", - "dataset_name": "recover_after_touching_an_obstacle", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" - 
], - "tasks": "Touch the bottle with left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null }, - { - "object_name": "mineral_water", - "level1": "drinks", - "level2": "mineral_water", - "level3": null, - "level4": null, - "level5": null + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": 
{ + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "button", - "level1": "toy", - "level2": "button", - 
"level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-128391", - "dataset_size": "3.7GB", - "statistics": { - "total_episodes": 130, - "total_frames": 128391, - "total_tasks": 1, - "total_videos": 520, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "aebc9a40-0a90-4fc3-a705-fbc5c7d23392", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Touch the bottle with left gripper", - "End", - "Touch the bottle with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai 
Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "alpha_bot_2_recover_after_touching_an_obstacle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n 
├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_recover_after_touching_an_obstacle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_pass_the_cleaner_a": { - "path": "leju_robot_pass_the_cleaner_a", - "dataset_name": "pass_the_cleaner_a", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Turn the bottle to the front side.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "cleaner", - "level1": "daily_necessities", - "level2": "cleaner", - "level3": null, - "level4": null, - "level5": null + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } - ], - "operation_platform_height": null, - "frame_range": "0-169467", - "dataset_size": "8.7GB", - "statistics": { - "total_episodes": 457, - "total_frames": 169467, - "total_tasks": 1, - "total_videos": 1371, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "870e2a78-0b89-486a-bed4-12863a5b00d6", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Turn the bottle to the front side.", - "Pick up the bottle from the table.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": 
"leju_robot_pass_the_cleaner_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_pass_the_cleaner_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galbot_g1_steamer_storage_baozi_a": { - "path": "Galbot_g1_steamer_storage_baozi_a", - "dataset_name": "steamer_storage_baozi_a", + "Cobot_Magic_cube_reset": { + "path": "Cobot_Magic_cube_reset", + "dataset_name": "cube_reset", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -95421,51 +105302,35 @@ "level5": null }, { - "object_name": "baozi", - "level1": "food", - "level2": "baozi", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "steamer", - "level1": "cookware", - "level2": "steamer", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pot_lid", - "level1": "daily_necessities", - "level2": "pot_lid", + "object_name": "cube_block", + "level1": "toy", + "level2": "cube_block", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "cube_groove", + "level1": "container", + "level2": "cube_groove", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-250217", - "dataset_size": "4.3GB", + "operation_platform_height": 77.2, + "frame_range": "0-28883", + "dataset_size": "514.5MB", "statistics": { - "total_episodes": 266, - "total_frames": 250217, + "total_episodes": 98, + "total_frames": 28883, "total_tasks": 1, - "total_videos": 798, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "1e479c19-9f69-42ac-9238-114fcf1a487c", + "dataset_uuid": "5f4b3baf-119d-489b-b64d-59ea1b989422", "language": [ "en", "zh" @@ -95475,10 +105340,10 @@ ], "sub_tasks": [ "End", - "Place the baozi on the 
steamer with right gripper", - "Place the pot lid on the steamer with left gripper", - "Grasp the pot lid with left gripper", - "Grasp the baozi in the plate with right gripper", + "Pick up the cube", + "Grasp the cube", + "Place cube into the cube recess", + "Place cube into the cube-shaped recess", "null" ], "annotations": { @@ -95516,10 +105381,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Galbot_g1_steamer_storage_baozi_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Galbot_g1_steamer_storage_baozi_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_cube_reset_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_cube_reset_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_unplug": { + "Airbot_MMK2_take_book": { "task_categories": [ "robotics" ], @@ -95549,11 +105414,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_unplug", + "dataset_name": "Airbot_MMK2_take_book", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -95561,17 +105426,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "power_strips", - "level1": "appliances", - "level2": "power_strips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "charger", - "level1": "appliances", - "level2": "charger", + "object_name": "book", + "level1": "stationery", + "level2": "book", "level3": null, "level4": null, "level5": null @@ -95579,7 +105436,7 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the left hand secures the power strip, while the right hand unplugs the charger and sets it down." + "take a book from the forward pile with hand." 
], "sub_tasks": [ { @@ -95587,31 +105444,24 @@ "subtask_index": 0 }, { - "subtask": "Release socket with the left hand", + "subtask": "Place the book on the table with the right gripper", "subtask_index": 1 }, { - "subtask": "Put down plug with the right hand", + "subtask": "Hook the third book on the right with the right gripper", "subtask_index": 2 }, - { - "subtask": "Press and hold the socket with the left hand", - "subtask_index": 3 - }, - { - "subtask": "Unplug plug with the right hand", - "subtask_index": 4 - }, { "subtask": "null", - "subtask_index": 5 + "subtask_index": 3 } ], "atomic_actions": [ - "uncap", - "presss", - "place", - "pick" + "clip", + "pull", + "pick", + "takeout", + "place" ], "robot_name": [ "Airbot_MMK2" @@ -95645,23 +105495,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 90, - "total_frames": 21487, + "total_episodes": 98, + "total_frames": 28489, "fps": 30, - "total_tasks": 6, - "total_videos": 360, + "total_tasks": 4, + "total_videos": 392, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "870.71 MB" + "dataset_size": "1.03 GB" }, - "frame_num": 21487, - "dataset_size": "870.71 MB", - "data_structure": "Airbot_MMK2_unplug_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(78 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 28489, + "dataset_size": "1.03 GB", + "data_structure": "Airbot_MMK2_take_book_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:89" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -96015,128 +105865,19 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_pullBowl_storage_bread_a": { - "path": "G1edu-u3_pullBowl_storage_bread_a", - "dataset_name": "pullBowl_storage_bread_a", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Abnormal", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate_rack", - "level1": "furniture", - "level2": "plate_rack", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-126861", - "dataset_size": "2.6GB", - "statistics": { - "total_episodes": 178, - "total_frames": 126861, - "total_tasks": 1, - "total_videos": 534, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "11ed8a7f-106a-4c08-98a4-0abdd1b97a52", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Abnormal", - "Place the long bread in pink bowl with left hand", - "End", - "Grasp the round bread with left hand", - "Grasp the long bread with left hand", - "Place the round bread in pink bowl with left hand", - 
"Move the pink bowl to the center of table with right hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun 
Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pullBowl_storage_bread_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pullBowl_storage_bread_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_move_the_position_of_the_brush": { - "path": "R1_Lite_move_the_position_of_the_brush", - "dataset_name": "move_the_position_of_the_brush", + "Split_aloha_fold_the_pants": { + "path": "Split_aloha_fold_the_pants", + "dataset_name": "fold_the_pants", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "place", - "pick", - "grasp" + "grasp", + "fold" ], - "tasks": "Static", + "tasks": "Fold the bottom of the pants upward", "objects": [ { "object_name": "table", @@ -96147,355 +105888,27 @@ "level5": null }, { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": 
"office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "towel", + "object_name": "pants", "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, 
- "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { 
- "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "level2": "pants", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-17774", - "dataset_size": "677.0MB", + "frame_range": "0-402577", + "dataset_size": "8.5GB", "statistics": { - "total_episodes": 63, - "total_frames": 17774, - "total_tasks": 1, - "total_videos": 252, + "total_episodes": 486, + "total_frames": 402577, + "total_tasks": 4, + "total_videos": 1458, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "0626d7f7-69b7-4fb0-9edc-8ba92d1df7cf", + "dataset_uuid": "2d20404b-b035-4962-9e8e-055858867cb3", "language": [ "en", "zh" @@ -96504,13 +105917,9 @@ "robotics" ], "sub_tasks": [ - "Static", - "Place the brush on the table with right gripper", - "Grasp the brush with right gripper", - "Grasp the brush with left gripper", - "Place the brush on the table with left gripper", - "End", - "Abnormal", + "Fold the bottom of the pants upward", + "Fold the pants from left to right", + "Fold your pants from right to left", "null" ], "annotations": { @@ -96548,10 +105957,10 @@ ], "citation_bibtex": 
"@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_brush_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_brush_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n 
├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Split_aloha_fold_the_pants_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_fold_the_pants_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n 
│ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_pour_solid": { + "Airbot_MMK2_close_lid": { "task_categories": [ "robotics" ], @@ -96581,11 +105990,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_pour_solid", + "dataset_name": "Airbot_MMK2_close_lid", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "ousehhold", - "level2": "kitchen", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -96593,41 +106002,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "white_table", - "level1": "furniture", - "level2": "white_table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_cup", - "level1": "cups", - "level2": "plastic_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_dish", - "level1": "plates", - "level2": "green_dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pink_bowl", - "level1": "plastic_bowls", - "level2": "pink_bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "solid", - "level1": "materials", - "level2": "solid", + "object_name": "any_storage_box", + "level1": "storage utensils", + "level2": "any_storage_box", "level3": null, "level4": null, "level5": null @@ -96635,172 +106012,57 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick 
up the cup and pour the solid into a bowl or tray." - ], - "sub_tasks": [ - { - "subtask": "Grasp the glass of shrimp with the left gripper", - "subtask_index": 0 - }, - { - "subtask": "Pick up blue cup filled with coffee beans with left gripper", - "subtask_index": 1 - }, - { - "subtask": "Place blue cup with coffee beans on the table with left gripper", - "subtask_index": 2 - }, - { - "subtask": "Pour the shrimp into the green bowl with the left gripper", - "subtask_index": 3 - }, - { - "subtask": "Pour the coffee beans into the pink bowl with the left gripper", - "subtask_index": 4 - }, - { - "subtask": "Pour the coffee beans into the pink bowl with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "Left gripper", - "subtask_index": 6 - }, - { - "subtask": "Pour the shrimp into the green bowl with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Pick up blue cup filled with shrimp with left gripper", - "subtask_index": 8 - }, - { - "subtask": "Place blue cup with shrimp on the table with left gripper", - "subtask_index": 9 - }, - { - "subtask": "Pick up blue cup filled with coffee beans with right gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the glass of coffee beans with the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Pour the shrimp into the pink bowl with left gripper", - "subtask_index": 12 - }, - { - "subtask": "Pour the coffee beans into the green bowl with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Pour the coffee beans into the pink bowl with left gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the glass of coffee beans with the right gripper", - "subtask_index": 15 - }, - { - "subtask": "Pour the coffee beans into the green plate with the right gripper", - "subtask_index": 16 - }, - { - "subtask": "Pour the shrimp into the pink bowl with the right gripper", - "subtask_index": 17 - }, - { - "subtask": "Grasp the glass of shrimp with the right gripper", - 
"subtask_index": 18 - }, - { - "subtask": "Pour the coffee beans into the green plate with the left gripper", - "subtask_index": 19 - }, - { - "subtask": "Place blue cup with coffee powder on the table with left gripper", - "subtask_index": 20 - }, - { - "subtask": "Place the glass cup with the right gripper", - "subtask_index": 21 - }, - { - "subtask": "Place blue cup with coffee beans on the table with right gripper", - "subtask_index": 22 - }, - { - "subtask": "End", - "subtask_index": 23 - }, - { - "subtask": "Pour the coffee beans into the green bowl with the right gripper", - "subtask_index": 24 - }, - { - "subtask": "Place blue cup with shrimp on the table with right gripper", - "subtask_index": 25 - }, - { - "subtask": "Pick up blue cup filled with shrimp with right gripper", - "subtask_index": 26 - }, - { - "subtask": "Pour the shrimp into the pink bowl with right gripper", - "subtask_index": 27 - }, + "close the box lid by hand." + ], + "sub_tasks": [ { - "subtask": "Pour the shrimp into the pink bowl with the left gripper", - "subtask_index": 28 + "subtask": "Touch the box lid with the left gripper", + "subtask_index": 0 }, { - "subtask": "Pour the shrimp into the blue basin with left gripper", - "subtask_index": 29 + "subtask": "Close the box lid with the left gripper", + "subtask_index": 1 }, { - "subtask": "Pour the coffee beans into the blue basin with right gripper", - "subtask_index": 30 + "subtask": "End", + "subtask_index": 2 }, { - "subtask": "Right gripper", - "subtask_index": 31 + "subtask": "Touch the box lid with the right gripper", + "subtask_index": 3 }, { - "subtask": "Pour the shrimp into the blue basin with right gripper", - "subtask_index": 32 + "subtask": "Abnormal", + "subtask_index": 4 }, { - "subtask": "Place the glass cup with the left gripper", - "subtask_index": 33 + "subtask": "Close the box lid with the right gripper", + "subtask_index": 5 }, { "subtask": "null", - "subtask_index": 34 + "subtask_index": 6 } ], 
"atomic_actions": [ - "grasp", - "pick", - "place", - "pour" + "close" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -96817,30 +106079,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 39, - "total_frames": 16353, + "total_episodes": 239, + "total_frames": 28930, "fps": 30, - "total_tasks": 35, - "total_videos": 156, + "total_tasks": 7, + "total_videos": 956, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "459.77 MB" + "dataset_size": "965.25 MB" }, - "frame_num": 16353, - "dataset_size": "459.77 MB", - "data_structure": 
"Galaxea_R1_Lite_Galaxea_R1_Lite_pour_solid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (27 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 28930, + "dataset_size": "965.25 MB", + "data_structure": "Airbot_MMK2_close_lid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(227 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:38" + "train": "0:238" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -96849,8 +106111,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -96859,11 +106121,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -96872,8 +106134,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -96882,10 +106144,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -96895,7 +106157,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -96905,10 +106167,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -96918,7 +106180,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -96931,7 +106193,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], 
"names": [ "left_arm_joint_1_rad", @@ -96946,14 +106208,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -96968,8 +106252,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -97120,66 +106426,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -97201,127 +106447,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "R1_Lite_boil_water_in_a_kettle": { - "path": "R1_Lite_boil_water_in_a_kettle", - "dataset_name": "boil_water_in_a_kettle", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Start boiling water", - "objects": [ - { - "object_name": "stove_top", - "level1": "furniture", - "level2": "stove_top", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "boil_the_kettle", - "level1": "electric_appliance", - "level2": "boil_the_kettle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "heating_plate", - "level1": "electric_appliance", - "level2": "heating_plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sink", - "level1": "container", - "level2": "sink", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 
null, - "frame_range": "0-85792", - "dataset_size": "3.9GB", - "statistics": { - "total_episodes": 75, - "total_frames": 85792, - "total_tasks": 1, - "total_videos": 225, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "68764373-ee5e-45aa-ad8a-1ca6cf3e0834", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Start boiling water", - "Place it on the power base", - "Finish boiling water", - "abnormal", - "Place it by the sink", - "Pick up the kettle", - "Grab the kettle", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, 
Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_boil_water_in_a_kettle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_boil_water_in_a_kettle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_place_apple_c": { - "path": "G1edu-u3_place_apple_c", - "dataset_name": "place_apple_c", + "G1edu-u3_place_metal_bowl_ae": { + "path": "G1edu-u3_place_metal_bowl_ae", + "dataset_name": "place_metal_bowl_ae", "robot_type": "", "end_effector_type": [ "three_finger_hand" @@ -97331,7 +106461,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Place the metal bowl on the table with left gripper", "objects": [ { "object_name": "table", @@ -97342,27 +106472,27 @@ "level5": null }, { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", + "object_name": "metal_bowl", + "level1": "bowl", + "level2": "metal_bowl", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-6567", - "dataset_size": "81.6MB", + "frame_range": "0-7884", + "dataset_size": "120.4MB", "statistics": { - 
"total_episodes": 29, - "total_frames": 6567, + "total_episodes": 38, + "total_frames": 7884, "total_tasks": 1, - "total_videos": 29, + "total_videos": 38, "total_chunks": 1, - "chunks_size": 30, + "chunks_size": 39, "fps": 30 }, - "dataset_uuid": "fadd4ee3-65b3-497e-a817-3885814cab0c", + "dataset_uuid": "58ed5982-2a5f-4762-a995-5d590a2beeb4", "language": [ "en", "zh" @@ -97371,9 +106501,9 @@ "robotics" ], "sub_tasks": [ + "Place the metal bowl on the table with left gripper", "End", - "Place the apple on the table with right gripper", - "Place the apple on the table with left gripper", + "Place the metal bowl on the table with right gripper", "null" ], "annotations": { @@ -97411,10 +106541,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", 
"depth_enabled": false, - "data_schema": "G1edu-u3_place_apple_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_place_apple_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_place_metal_bowl_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_place_metal_bowl_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_move_block_both_hands": { + "Airbot_MMK2_storage_tape_measure_umbrella": { "task_categories": [ "robotics" ], @@ -97444,7 +106574,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_block_both_hands", + "dataset_name": "Airbot_MMK2_storage_tape_measure_umbrella", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -97456,9 +106586,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "basin", + "level1": "storage_utensils", + "level2": "basin", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape_measure", + "level1": "rulers", + "level2": "tape_measure", + "level3": 
null, + "level4": null, + "level5": null + }, + { + "object_name": "umbrella", + "level1": "items", + "level2": "umbrella", "level3": null, "level4": null, "level5": null @@ -97466,32 +106612,36 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the building blocks with both hands simultaneously and place them on the blocks." + "put the tape measure and the umbrella in the basin." ], "sub_tasks": [ { - "subtask": "Place the blue block on top of the red block with the left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the tape measure with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the green block with the right gripper", + "subtask": "Grasp the umbrella with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the blue block with the left gripper", + "subtask": "Static", "subtask_index": 3 }, { - "subtask": "Place the green block on top of the yellow block with the right gripper", + "subtask": "Place the umbrella in the white basket with the left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the tape measure in the white basket with the right gripper", "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ @@ -97531,23 +106681,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 54, - "total_frames": 8888, + "total_episodes": 40, + "total_frames": 7112, "fps": 30, - "total_tasks": 6, - "total_videos": 216, + "total_tasks": 7, + "total_videos": 160, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "388.10 MB" + "dataset_size": "236.10 MB" }, - "frame_num": 8888, - "dataset_size": "388.10 MB", - "data_structure": "Airbot_MMK2_move_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- 
eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 7112, + "dataset_size": "236.10 MB", + "data_structure": "Airbot_MMK2_storage_tape_measure_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(28 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:53" + "train": "0:39" }, "features": { "observation.images.cam_head_rgb": { @@ -97901,140 +107051,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_food_storage": { - "path": "G1edu-u3_food_storage", - "dataset_name": "food_storage", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the bowl with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "donut", - "level1": "food", - "level2": "donut", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-316377", - "dataset_size": "13.6GB", - "statistics": { - "total_episodes": 190, - "total_frames": 316377, - "total_tasks": 1, - "total_videos": 570, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - 
}, - "dataset_uuid": "4f2a72fa-3388-401a-ab17-b437417cce23", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the bowl with the left gripper", - "Pour the cake into the basket with the left gripper", - "End", - "Place the bowl in the center of the table with the left gripper", - "Pick up the bread with the right gripper and place it on the plate", - "Place the bowl on the table with the left gripper", - "Push the bowl to the center of the table using the left gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, 
Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Galaxea_R1_Lite_storage_object_brown_bowl": { + "Galaxea_R1_Lite_classify_object_green_tablecloth": { "task_categories": [ "robotics" ], @@ -98064,11 +107081,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_brown_bowl", + "dataset_name": "Galaxea_R1_Lite_classify_object_green_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -98076,562 +107093,698 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "brown_bowl", - "level1": "plastic_bowl", - "level2": "brown_bowl", + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "banana", - "level1": "fruits", 
- "level2": "banana", + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "bathing_in_flowers", - "level1": "daily_chemical_products", - "level2": "bathing_in_flowers", + "object_name": "any_fruits", + "level1": "fruits", + "level2": "any_fruits", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", + "object_name": "any_vegetables", + "level1": "vegetables", + "level2": "any_vegetables", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_pot", - "level1": "cookware", - "level2": "blue_pot", + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", "level3": null, "level4": null, "level5": null }, { - "object_name": "toast_slices", + "object_name": "any_bread", "level1": "bread", - "level2": "toast_slices", + "level2": "any_bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", + "object_name": "green_table_cloths", + "level1": "table_cloths", + "level2": "green_table_cloths", "level3": null, "level4": null, "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "on the green table,place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the round bread and put it in the right basket", + "subtask_index": 0 }, { - "object_name": "can", - "level1": "snacks", - "level2": "can", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the can and put it in the right basket", + "subtask_index": 1 }, { - "object_name": "coke(slim_can)", - "level1": "beveragesbeverages", - "level2": "coke(slim_can)", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the compass and put it in the left basket", + "subtask_index": 2 }, { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask_index": 3 }, { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the orange and put it in the right basket", + "subtask_index": 4 }, { - "object_name": "compass", - "level1": "rulers", - "level2": "compass", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the round chewing gum and put it in the right basket", + "subtask_index": 5 }, { - "object_name": "block_pillar", - "level1": "building_blocks", - "level2": "block_pillar", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the playing cards and put it in the left basket", + "subtask_index": 6 }, { - "object_name": "egg_beater", - "level1": "spoons_and_spatulas", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the purple garbage bag and put it in the left basket", + "subtask_index": 7 }, { - "object_name": "eraser", - "level1": "erasers", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the back scratcher in the center of the table", + "subtask_index": 8 }, { - "object_name": "chewing_gum", - 
"level1": "snacks", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the eggplant in the center of the table", + "subtask_index": 9 }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the square chewing gum and put it in the right basket", + "subtask_index": 10 }, { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the washing liquid and put it in the left basket", + "subtask_index": 11 }, { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the red pot and put it in the left basket", + "subtask_index": 12 }, { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the grape and put it in the right basket", + "subtask_index": 13 }, { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the tape and put it in the left basket", + "subtask_index": 14 }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the shampoo and put it in the left basket", + "subtask_index": 15 }, { - "object_name": "square_building_blocks", - "level1": "building_blocks", - "level2": "square_building_blocks", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the playing cards and put it in the left basket", + "subtask_index": 16 }, { - "object_name": "tape", - "level1": "stationery", - "level2": 
"tape", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the orange in the center of the table", + "subtask_index": 17 }, { - "object_name": "cake", - "level1": "bread", - "level2": "cake", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the eggplant and put it in the right basket", + "subtask_index": 18 }, { - "object_name": "duck", - "level1": "doll", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pick up the banana and put it in the right basket", + "subtask_index": 19 }, { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use a gripper to pick the target object and place on the brown bowl." - ], - "sub_tasks": [ + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 20 + }, { - "subtask": "Grasp the blue pot with the left gripper", - "subtask_index": 0 + "subtask": "Grasp the shampoo and put it in the left basket", + "subtask_index": 21 }, { - "subtask": "Place the coke in the bowl with the left gripper", - "subtask_index": 1 + "subtask": "Place the pink bowl in the center of the table", + "subtask_index": 22 }, { - "subtask": "Grasp the plugboard with the left gripper", - "subtask_index": 2 + "subtask": "Place the compass in the center of the table", + "subtask_index": 23 }, { - "subtask": "Place the back scratcher in the bowl with the right gripper", - "subtask_index": 3 + "subtask": "Place the peach in the center of the table", + "subtask_index": 24 + }, + { + "subtask": "Place the banana in the center of the table", + "subtask_index": 25 + }, + { + "subtask": "Place the tape in the center of the table", + "subtask_index": 26 + }, + { + "subtask": "Pick up the round bread and put it in 
the left basket", + "subtask_index": 27 + }, + { + "subtask": "Pick up the tea cup and put it in the left basket", + "subtask_index": 28 + }, + { + "subtask": "Pick up the round bread and put it in the right basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the pear and put it in the right basket", + "subtask_index": 31 + }, + { + "subtask": "Place the lemon and put it in the center of the table", + "subtask_index": 32 + }, + { + "subtask": "Grasp the compass and put it in the right basket", + "subtask_index": 33 + }, + { + "subtask": "Pick up the can and put it in the right basket", + "subtask_index": 34 + }, + { + "subtask": "Place the red pot in the center of the table", + "subtask_index": 35 + }, + { + "subtask": "Pick up the yellow marker and put it in the left basket", + "subtask_index": 36 + }, + { + "subtask": "Grasp the yellow cake and put it in the right basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 38 + }, + { + "subtask": "Grasp the canned cola and put it in the right basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 40 + }, + { + "subtask": "Pick up the lime and put it in the right basket", + "subtask_index": 41 + }, + { + "subtask": "Pick up the pear and put it in the right basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the green chewing gum and put it in the right basket", + "subtask_index": 43 + }, + { + "subtask": "Pick up the croissant and put it in the right basket", + "subtask_index": 44 + }, + { + "subtask": "Pick up the chinese cabbage and put it in the right basket", + "subtask_index": 45 + }, + { + "subtask": "Place the fruit fudge in the center of the table", + "subtask_index": 46 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + 
"subtask_index": 47 + }, + { + "subtask": "Place the yellow marker in the center of the table", + "subtask_index": 48 + }, + { + "subtask": "Place the round chewing gum in the center of the table", + "subtask_index": 49 + }, + { + "subtask": "Pick up the canned cola and put it in the right basket", + "subtask_index": 50 + }, + { + "subtask": "Pick up the red pot and put it in the left basket", + "subtask_index": 51 + }, + { + "subtask": "Place the green lemon in the center of the table", + "subtask_index": 52 + }, + { + "subtask": "Grasp the pink marker and put it in the left basket", + "subtask_index": 53 + }, + { + "subtask": "Place the washing liquid in the center of the table", + "subtask_index": 54 + }, + { + "subtask": "Abnormal", + "subtask_index": 55 + }, + { + "subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 56 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 57 + }, + { + "subtask": "Place the Red Bull in the center of the table", + "subtask_index": 58 + }, + { + "subtask": "Place the mango in the center of the table", + "subtask_index": 59 + }, + { + "subtask": "Grasp the fruit fudge and put it in the right basket", + "subtask_index": 60 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 61 + }, + { + "subtask": "Grasp the red bull and put it in the right basket", + "subtask_index": 62 + }, + { + "subtask": "Pick up the lemon and put it in the right basket", + "subtask_index": 63 + }, + { + "subtask": "Grasp the chinese cabbage and put it in the right basket", + "subtask_index": 64 + }, + { + "subtask": "Grasp the pink pot and put it in the left basket", + "subtask_index": 65 + }, + { + "subtask": "Grasp the croissant and put it in the right basket", + "subtask_index": 66 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 67 + }, + { + "subtask": "Pick up the egg yolk pastry and put it in the right 
basket", + "subtask_index": 68 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 69 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 70 + }, + { + "subtask": "Place the blue towel in the center of the table", + "subtask_index": 71 + }, + { + "subtask": "Place the yellow cake in the center of the table", + "subtask_index": 72 + }, + { + "subtask": "Place the grey cup in the center of the table", + "subtask_index": 73 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 74 + }, + { + "subtask": "Place the square chewing gum in the center of the table", + "subtask_index": 75 + }, + { + "subtask": "Pick up the milk and put it in the right basket", + "subtask_index": 76 + }, + { + "subtask": "Grasp the back scratcher and put it in the right basket", + "subtask_index": 77 }, { - "subtask": "Place the plugboard in the bowl with the right gripper", - "subtask_index": 4 + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 78 }, { - "subtask": "Place the compasses in the bowl with the right gripper", - "subtask_index": 5 + "subtask": "Pick up the yellow cake and put it in the right basket", + "subtask_index": 79 }, { - "subtask": "Grasp the potato chips with the right gripper", - "subtask_index": 6 + "subtask": "Pick up the blue bowl and put it in the left basket", + "subtask_index": 80 }, { - "subtask": "Grasp the banana with the left gripper", - "subtask_index": 7 + "subtask": "Grasp the lemon and put it in the right basket", + "subtask_index": 81 }, { - "subtask": "Place the square chewing gum in the bowl with the right gripper", - "subtask_index": 8 + "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask_index": 82 }, { - "subtask": "Place the round wooden block in the bowl with the right gripper", - "subtask_index": 9 + "subtask": "Grasp the orange and put it in the right basket", + 
"subtask_index": 83 }, { - "subtask": "Place the tin in the bowl with the left gripper", - "subtask_index": 10 + "subtask": "Pick up the soda water and put it in the right basket", + "subtask_index": 84 }, { - "subtask": "Grasp the compasses with the right gripper", - "subtask_index": 11 + "subtask": "Grasp the red pot and put it in the right basket", + "subtask_index": 85 }, { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 12 + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 86 }, { - "subtask": "Place the potato chips in the bowl with the left gripper", - "subtask_index": 13 + "subtask": "Pick up the blue cup and put it in the left basket", + "subtask_index": 87 }, { - "subtask": "Grasp the blue cup with the left gripper", - "subtask_index": 14 + "subtask": "Pick up the mango and put it in the right basket", + "subtask_index": 88 }, { - "subtask": "Place the chocolate cake in the bowl with the right gripper", - "subtask_index": 15 + "subtask": "Place the pink marker in the center of the table", + "subtask_index": 89 }, { - "subtask": "Grasp the back scratcher with the right gripper", - "subtask_index": 16 + "subtask": "Grasp the grey cup and put it in the left basket", + "subtask_index": 90 }, { - "subtask": "Place the square wooden block in the bowl with the right gripper", - "subtask_index": 17 + "subtask": "Pick up the long bread and put it in the right basket", + "subtask_index": 91 }, { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 18 + "subtask": "Pick up the soap and put it in the left basket", + "subtask_index": 92 }, { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 19 + "subtask": "Grasp the peach and put it in the left basket", + "subtask_index": 93 }, { - "subtask": "Grasp the plugboard with the right gripper", - "subtask_index": 20 + "subtask": "Pick up the pink pot and put it in the left basket", + "subtask_index": 94 }, 
{ - "subtask": "Grasp the yogurt with the right gripper", - "subtask_index": 21 + "subtask": "Grasp the sausage and put it in the right basket", + "subtask_index": 95 }, { - "subtask": "Grasp the tin with the left gripper", - "subtask_index": 22 + "subtask": "Pick up the yogurt and put it in the right basket", + "subtask_index": 96 }, { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 23 + "subtask": "Place the bath ball in the center of the table", + "subtask_index": 97 }, { - "subtask": "Place the square wooden block in the bowl with the left gripper", - "subtask_index": 24 + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 98 }, { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 25 + "subtask": "Grasp the ad milk and put it in the right basket", + "subtask_index": 99 }, { - "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 26 + "subtask": "Place the pink pot in the center of the table", + "subtask_index": 100 }, { - "subtask": "Place the potato chips in the bowl with the right gripper", - "subtask_index": 27 + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 101 }, { - "subtask": "Place the duck toy in the bowl with the left gripper", - "subtask_index": 28 + "subtask": "Place the glasses case in the center of the table", + "subtask_index": 102 }, { - "subtask": "Grasp the hard facial cleanser with the right gripper", - "subtask_index": 29 + "subtask": "Place the gray towel in the center of the table", + "subtask_index": 103 }, { - "subtask": "Place the green lemon in the bowl with the right gripper", - "subtask_index": 30 + "subtask": "Pick up the back scratcher and put it in the left basket", + "subtask_index": 104 }, { - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 31 + "subtask": "Place the purple garbage bag in the center of the table", + "subtask_index": 105 }, 
{ - "subtask": "Place the peach in the bowl with the left gripper", - "subtask_index": 32 + "subtask": "Place the blue cup in the center of the table", + "subtask_index": 106 }, { - "subtask": "Place the soft facial cleanser in the bowl with the right gripper", - "subtask_index": 33 + "subtask": "Pick up the blue towel and put it in the left basket", + "subtask_index": 107 }, { - "subtask": "Place the shower sphere in the bowl with the left gripper", - "subtask_index": 34 + "subtask": "Place the pear in the center of the table", + "subtask_index": 108 }, { - "subtask": "Place the banana in the bowl with the right gripper", - "subtask_index": 35 + "subtask": "Place the soft cleanser in the center of the table", + "subtask_index": 109 }, { - "subtask": "Place the shower sphere in the bowl with the right gripper", - "subtask_index": 36 + "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask_index": 110 }, { - "subtask": "Grasp the green lemon with the right gripper", - "subtask_index": 37 + "subtask": "Pick up the spoon and put it in the left basket", + "subtask_index": 111 }, { - "subtask": "Place the brown towel in the bowl with the right gripper", - "subtask_index": 38 + "subtask": "Place the sausage in the center of the table", + "subtask_index": 112 }, { - "subtask": "Place the blackboard erasure in the bowl with the left gripper", - "subtask_index": 39 + "subtask": "Place the green chewing gum in the center of the table", + "subtask_index": 113 }, { - "subtask": "Grasp the potato chips with the left gripper", - "subtask_index": 40 + "subtask": "Pick up the peach and put it in the right basket", + "subtask_index": 114 }, { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 41 + "subtask": "Place the can in the center of the table", + "subtask_index": 115 }, { - "subtask": "End", - "subtask_index": 42 + "subtask": "Grasp the mango and put it in the right basket", + "subtask_index": 116 }, { - "subtask": "Place 
the blue cup in the bowl with the right gripper", - "subtask_index": 43 + "subtask": "Place the canned cola in the center of the table", + "subtask_index": 117 }, { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 44 + "subtask": "Pick up the gray towel and put it in the left basket", + "subtask_index": 118 }, { - "subtask": "Grasp the coke with the left gripper", - "subtask_index": 45 + "subtask": "Place the ad milk in the center of the table", + "subtask_index": 119 }, { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 46 + "subtask": "Pick up the sausage and put it in the right basket", + "subtask_index": 120 }, { - "subtask": "Place the round wooden block in the bowl with the left gripper", - "subtask_index": 47 + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 121 }, { - "subtask": "Place the banana in the bowl with the left gripper", - "subtask_index": 48 + "subtask": "Pick up the pink marker and put it in the left basket", + "subtask_index": 122 }, { - "subtask": "Place the chocolate cake in the bowl with the left gripper", - "subtask_index": 49 + "subtask": "Grasp the detergent and put it in the left basket", + "subtask_index": 123 }, { - "subtask": "Place the tape in the bowl with the right gripper", - "subtask_index": 50 + "subtask": "Pick up the pink bowl and put it in the left basket", + "subtask_index": 124 }, { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 51 + "subtask": "Grasp the Red Bull and put it in the right basket", + "subtask_index": 125 }, { - "subtask": "Place the chocolate in the bowl with the left gripper", - "subtask_index": 52 + "subtask": "Pick up the ad milk and put it in the right basket", + "subtask_index": 126 }, { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 53 + "subtask": "Place the long bread in the center of the table", + "subtask_index": 127 }, { - 
"subtask": "Place the hard facial cleanser in the bowl with the left gripper", - "subtask_index": 54 + "subtask": "Pick up the soft cleanser and put it in the left basket", + "subtask_index": 128 }, { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 55 + "subtask": "Place the croissant in the center of the table", + "subtask_index": 129 }, { - "subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 56 + "subtask": "Grasp the lime and put it in the right basket", + "subtask_index": 130 }, { - "subtask": "Place the hard facial cleanser in the bowl with the right gripper", - "subtask_index": 57 + "subtask": "Place the round bread in the center of the table", + "subtask_index": 131 }, { - "subtask": "Place the round bread in the bowl with the right gripper", - "subtask_index": 58 + "subtask": "Pick up the green chewing gum and put it in the right basket", + "subtask_index": 132 }, { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 59 + "subtask": "Grasp the pink bowl and put it in the left basket", + "subtask_index": 133 }, { - "subtask": "Place the tape in the bowl with the left gripper", - "subtask_index": 60 + "subtask": "Place the playing cards in the center of the table", + "subtask_index": 134 }, { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 61 + "subtask": "Pick up the glasses case and put it in the left basket", + "subtask_index": 135 }, { - "subtask": "Grasp the tape with the left gripper", - "subtask_index": 62 + "subtask": "Pick up the fruit fudge and put it in the right basket", + "subtask_index": 136 }, { - "subtask": "Place the duck toy in the bowl with the right gripper", - "subtask_index": 63 + "subtask": "Pick up the eggplant and put it in the right basket", + "subtask_index": 137 }, { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 64 + "subtask": "Place the white eraser in the center of the table", + 
"subtask_index": 138 }, { - "subtask": "Place the blue pot in the bowl with the left gripper", - "subtask_index": 65 + "subtask": "Place the yogurt in the center of the table", + "subtask_index": 139 }, { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 66 + "subtask": "Pick up the detergent and put it in the left basket", + "subtask_index": 140 }, { - "subtask": "Place the plugboard in the bowl with the left gripper", - "subtask_index": 67 + "subtask": "Grasp the blue towel and put it in the left basket", + "subtask_index": 141 }, { - "subtask": "Place the coke in the bowl with the right gripper", - "subtask_index": 68 + "subtask": "Pick up the red bull and put it in the right basket", + "subtask_index": 142 }, { - "subtask": "Place the round bread in the bowl with the left gripper", - "subtask_index": 69 + "subtask": "Place the lime in the center of the table", + "subtask_index": 143 }, { - "subtask": "Place the blue cup in the bowl with the left gripper", - "subtask_index": 70 + "subtask": "Grasp the mint candy and put it in the right basket", + "subtask_index": 144 }, { - "subtask": "Place the blue pot in the bowl with the right gripper", - "subtask_index": 71 + "subtask": "Place the soap in the center of the table", + "subtask_index": 145 }, { - "subtask": "Grasp the round bread with the left gripper", - "subtask_index": 72 + "subtask": "Grasp the red marker and put it in the left basket", + "subtask_index": 146 }, { - "subtask": "Grasp the chocolate with the left gripper", - "subtask_index": 73 + "subtask": "Pick up the purple garbage bag and put it in the left basket", + "subtask_index": 147 }, { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 74 + "subtask": "Place the detergent in the center of the table", + "subtask_index": 148 }, { - "subtask": "Place the yogurt in the bowl with the right gripper", - "subtask_index": 75 + "subtask": "Pick up the bath ball and put it in the left 
basket", + "subtask_index": 149 }, { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 76 + "subtask": "Grasp the gray towel and put it in the left basket", + "subtask_index": 150 }, { - "subtask": "Grasp the soft facial cleanser with the right gripper", - "subtask_index": 77 + "subtask": "Grasp the yellow marker and put it in the left basket", + "subtask_index": 151 }, { - "subtask": "Place the compasses in the bowl with the left gripper", - "subtask_index": 78 + "subtask": "Grasp the pink towel and put it in the left basket", + "subtask_index": 152 }, { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 79 + "subtask": "End", + "subtask_index": 153 }, { - "subtask": "Place the brown towel in the bowl with the left gripper", - "subtask_index": 80 + "subtask": "Place the shampoo in the center of the table", + "subtask_index": 154 }, { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 81 + "subtask": "Grasp the green lemon and put it in the right basket", + "subtask_index": 155 }, { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 82 + "subtask": "Grasp the round chewing gum and put it in the right basket", + "subtask_index": 156 }, { "subtask": "null", - "subtask_index": 83 + "subtask_index": 157 } ], "atomic_actions": [ @@ -98653,8 +107806,8 @@ "came_info": { "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, 
resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -98671,23 +107824,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 101, - "total_frames": 23706, + "total_episodes": 224, + "total_frames": 251221, "fps": 30, - "total_tasks": 84, - "total_videos": 404, + "total_tasks": 158, + "total_videos": 896, "total_chunks": 1, "chunks_size": 1000, "state_dim": 14, "action_dim": 14, "camera_views": 4, - "dataset_size": "909.34 MB" + "dataset_size": "25.62 GB" }, - "frame_num": 23706, - "dataset_size": "909.34 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 251221, + "dataset_size": "25.62 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_classify_object_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(212 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:100" + "train": "0:223" }, "features": { "observation.images.cam_head_left_rgb": { @@ -98739,8 +107892,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 360, + 640, 3 ], "names": [ @@ -98749,8 +107902,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 360, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -98762,8 +107915,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 360, + 640, 3 ], "names": [ @@ -98772,8 +107925,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 360, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -99057,9 +108210,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Cobot_Magic_cube_reset": { - "path": "Cobot_Magic_cube_reset", - "dataset_name": "cube_reset", + "AgiBot-g1_robotic_arm_picks_up_parts": { + "path": "AgiBot-g1_robotic_arm_picks_up_parts", + "dataset_name": "robotic_arm_picks_up_parts", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -99070,7 +108223,7 @@ "pick", "place" ], - "tasks": "End", + "tasks": "Place the hard drive bracket into the box.", "objects": [ { "object_name": "table", @@ -99081,35 +108234,35 @@ "level5": null }, { - "object_name": "cube_block", - "level1": "toy", - "level2": "cube_block", + "object_name": 
"black_container", + "level1": "container", + "level2": "black_container", "level3": null, "level4": null, "level5": null }, { - "object_name": "cube_groove", - "level1": "container", - "level2": "cube_groove", + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-28883", - "dataset_size": "514.5MB", + "operation_platform_height": null, + "frame_range": "0-30190", + "dataset_size": "15.0GB", "statistics": { - "total_episodes": 98, - "total_frames": 28883, + "total_episodes": 53, + "total_frames": 30190, "total_tasks": 1, - "total_videos": 294, + "total_videos": 424, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5f4b3baf-119d-489b-b64d-59ea1b989422", + "dataset_uuid": "9d81f45b-a37d-46ad-b5cd-899737518fb6", "language": [ "en", "zh" @@ -99118,11 +108271,8 @@ "robotics" ], "sub_tasks": [ - "End", - "Pick up the cube", - "Grasp the cube", - "Place cube into the cube recess", - "Place cube into the cube-shaped recess", + "Place the hard drive bracket into the box.", + "Pick up the hard drive bracket from the feeding area.", "null" ], "annotations": { @@ -99160,10 +108310,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, 
Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_cube_reset_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_cube_reset_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── 
(...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_robotic_arm_picks_up_parts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_robotic_arm_picks_up_parts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_book": { + "Airbot_MMK2_move_medicine_bottle": { "task_categories": [ "robotics" ], @@ -99193,470 +108343,16 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_book", + "dataset_name": "Airbot_MMK2_move_medicine_bottle", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "study_room", + "level1": "medical_healthcare", + "level2": "pharmacy", "level3": null, "level4": null, "level5": null }, "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "book", - "level1": "stationery", - "level2": "book", - "level3": null, 
- "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "take a book from the forward pile with hand." - ], - "sub_tasks": [ - { - "subtask": "End", - "subtask_index": 0 - }, - { - "subtask": "Place the book on the table with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Hook the third book on the right with the right gripper", - "subtask_index": 2 - }, - { - "subtask": "null", - "subtask_index": 3 - } - ], - "atomic_actions": [ - "clip", - "pull", - "pick", - "takeout", - "place" - ], - "robot_name": [ - "Airbot_MMK2" - ], - "end_effector_type": "five_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 98, - "total_frames": 28489, - "fps": 30, - "total_tasks": 4, - "total_videos": 392, - "total_chunks": 1, - "chunks_size": 
1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "1.03 GB" - }, - "frame_num": 28489, - "dataset_size": "1.03 GB", - "data_structure": "Airbot_MMK2_take_book_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", - "splits": { - "train": "0:97" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - 
"observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.state": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 36 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - 
"left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - 
"right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "Split_aloha_fold_the_pants": { - "path": "Split_aloha_fold_the_pants", - "dataset_name": "fold_the_pants", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "fold" - ], - "tasks": "Fold the bottom of the pants upward", "objects": [ { "object_name": "table", @@ -99667,123 +108363,17 @@ "level5": null }, { - "object_name": "pants", - "level1": "clothing", - "level2": "pants", + "object_name": "medicine_bottle", + "level1": "container", + "level2": "medicine_bottle", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": 
"0-402577", - "dataset_size": "8.5GB", - "statistics": { - "total_episodes": 486, - "total_frames": 402577, - "total_tasks": 4, - "total_videos": 1458, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "2d20404b-b035-4962-9e8e-055858867cb3", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Fold the bottom of the pants upward", - "Fold the pants from left to right", - "Fold your pants from right to left", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, 
Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Split_aloha_fold_the_pants_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_fold_the_pants_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_close_lid": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_close_lid", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "study_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "any_storage_box", - "level1": "storage utensils", - "level2": "any_storage_box", + "object_name": "board", + "level1": "material", + "level2": "board", "level3": null, "level4": null, "level5": null @@ -99791,40 +108381,46 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "close the box lid by hand." + "place the medicine bottles on the table onto the board with both hands." 
], "sub_tasks": [ { - "subtask": "Touch the box lid with the left gripper", + "subtask": "Place the white bottle on the white board with the right gripper", "subtask_index": 0 }, { - "subtask": "Close the box lid with the left gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Grasp the white bottle with the left gripper", "subtask_index": 2 }, { - "subtask": "Touch the box lid with the right gripper", + "subtask": "Static", "subtask_index": 3 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Close the box lid with the right gripper", + "subtask": "Grasp the white bottle the right gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Place the white bottle on the white board with the left gripper", "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ - "close" + "grasp", + "pick", + "place" ], "robot_name": [ "Airbot_MMK2" @@ -99858,23 +108454,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 239, - "total_frames": 28930, + "total_episodes": 50, + "total_frames": 16815, "fps": 30, - "total_tasks": 7, - "total_videos": 956, + "total_tasks": 8, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "965.25 MB" + "dataset_size": "493.20 MB" }, - "frame_num": 28930, - "dataset_size": "965.25 MB", - "data_structure": "Airbot_MMK2_close_lid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- 
episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (227 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 16815, + "dataset_size": "493.20 MB", + "data_structure": "Airbot_MMK2_move_medicine_bottle_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:238" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -100228,102 +108824,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_place_metal_bowl_ae": { - "path": "G1edu-u3_place_metal_bowl_ae", - "dataset_name": "place_metal_bowl_ae", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick", - "place" - ], - "tasks": "Place the metal bowl on the table with left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "metal_bowl", - "level1": "bowl", - "level2": "metal_bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-7884", - "dataset_size": "120.4MB", - "statistics": { - "total_episodes": 38, - "total_frames": 7884, - "total_tasks": 1, - "total_videos": 38, - "total_chunks": 1, - "chunks_size": 39, - "fps": 30 - }, - "dataset_uuid": "58ed5982-2a5f-4762-a995-5d590a2beeb4", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the metal bowl on the table with left gripper", - "End", - "Place the metal bowl on the table with right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - 
"gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_place_metal_bowl_ae_qced_hardlink/\n├── annotations/\n│ 
├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_place_metal_bowl_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_storage_tape_measure_umbrella": { + "Galaxea_R1_Lite_mix_color": { "task_categories": [ "robotics" ], @@ -100353,11 +108854,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_tape_measure_umbrella", + "dataset_name": "Galaxea_R1_Lite_mix_color", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -100365,25 
+108866,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "basin", - "level1": "storage_utensils", - "level2": "basin", + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "tape_measure", - "level1": "rulers", - "level2": "tape_measure", + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "umbrella", - "level1": "items", - "level2": "umbrella", + "object_name": "white_pigment", + "level1": "materials", + "level2": "white_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "large_test_tubes", + "level1": "laboratory_supplies", + "level2": "large_test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "laboratory_supplies", + "level2": "holding_utensils", "level3": null, "level4": null, "level5": null @@ -100391,59 +108916,96 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the tape measure and the umbrella in the basin." + "pick up the test tube with red pigment the test tube with blue pigment and the test tube with white pigment by grippers and pour them into the beaker." 
], "sub_tasks": [ { - "subtask": "End", + "subtask": "Pour the red reagent into the graduated cylinder and place the test tube into the paper cup", "subtask_index": 0 }, { - "subtask": "Grasp the tape measure with the right gripper", + "subtask": "Pour the orange reagent into the graduated cylinder and place the test tube into the paper cup", "subtask_index": 1 }, { - "subtask": "Grasp the umbrella with the left gripper", + "subtask": "Grasp the red reagent with the left gripper", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Pour the white reagent into the graduated cylinder with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the umbrella in the white basket with the left gripper", + "subtask": "Grasp the red reagent with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the tape measure in the white basket with the right gripper", + "subtask": "Pour the blue reagent into the graduated cylinder and place the test tube into the paper cup", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", "subtask_index": 6 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "End", + "subtask_index": 8 + }, + { + "subtask": "Place the test tube into the paper cup with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the test tube into the paper cup with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the blue reagent with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the white reagent with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Pour the white reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 14 + }, + { + 
"subtask": "null", + "subtask_index": 15 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -100460,30 +109022,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 40, - "total_frames": 7112, + "total_episodes": 50, + "total_frames": 79584, "fps": 30, - "total_tasks": 7, - "total_videos": 160, + "total_tasks": 16, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "236.10 MB" + "dataset_size": "2.99 GB" }, - "frame_num": 7112, - "dataset_size": "236.10 MB", - "data_structure": 
"Airbot_MMK2_storage_tape_measure_umbrella_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (28 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 79584, + "dataset_size": "2.99 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:39" + "train": "0:49" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -100492,8 +109054,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -100502,11 +109064,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -100515,8 +109077,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -100525,10 +109087,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -100538,7 +109100,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -100548,10 +109110,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -100561,7 +109123,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -100574,7 +109136,7 @@ "observation.state": 
{ "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -100589,36 +109151,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -100633,30 +109173,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -100798,10 +109316,70 @@ ], "dtype": "int32" }, - "eef_acc_mag_action": { + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": 
[ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" + "left_gripper_activity", + "right_gripper_activity" ], "shape": [ 2 @@ -100828,748 +109406,607 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_classify_object_green_tablecloth": { + "AIRBOT_MMK2_organize_books": { + "path": "AIRBOT_MMK2_organize_books", + "dataset_name": "organize_books", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Static", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "book", + "level1": "office_supplies", + "level2": "book", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bookshelf", + "level1": "container", + 
"level2": "bookshelf", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-9576", + "dataset_size": "439.6MB", + "statistics": { + "total_episodes": 50, + "total_frames": 9576, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "b36aed44-2805-4713-8d48-a14bebf79671", + "language": [ + "en", + "zh" + ], "task_categories": [ "robotics" ], - "language": [ - "en" + "sub_tasks": [ + "Static", + "Grasp the yellow book with the right gripper", + "Place the yellow book into the front mezzanine of the bookshelf with the right gripper", + "End", + "null" ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", "tags": [ "RoboCOIN", "LeRobot" ], - "license": "apache-2.0", - "configs": [ - { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, 
Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_organize_books_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_organize_books_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_fold_towel": { + "path": "RMC-AIDA-L_fold_towel", + "dataset_name": "fold_towel", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for 
details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_classify_object_green_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick", + "unfold" + ], + "tasks": "End", "objects": [ { - "object_name": "brown_basket", - "level1": "baskets", - "level2": "brown_basket", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "yellow_basket", - "level1": "baskets", - "level2": "yellow_basket", + "object_name": "towel", + "level1": "clothing", + "level2": "towel", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-139069", + "dataset_size": "1.5GB", + "statistics": { + "total_episodes": 314, + "total_frames": 139069, + "total_tasks": 2, + "total_videos": 942, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "189eaf6e-2609-4e71-8e6e-e6393a5d259e", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Press and secure one corner of the towel with the right arm", + "Unfold the towel with the left arm", + "Press and secure one corner of the towel with the left arm", + "Press and secure one corner of the towel with the right 
gripper", + "Unfold the towel with the left gripper", + "Unfold the towel with the right arm", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_fold_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_fold_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_box_storage_chopsticks": { + "path": "Cobot_Magic_box_storage_chopsticks", + "dataset_name": "box_storage_chopsticks", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "use the right arm to put the chopsticks into the lunch box", + "objects": [ { - "object_name": "any_fruits", - "level1": "fruits", - "level2": "any_fruits", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_vegetables", - "level1": "vegetables", - "level2": "any_vegetables", + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_snacks", - "level1": "snacks", - "level2": "any_snacks", + "object_name": "pallet", + "level1": "container", + "level2": "pallet", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_bread", - "level1": "bread", - "level2": "any_bread", + "object_name": "chopsticks", + "level1": "tableware", + "level2": "chopsticks", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_table_cloths", - "level1": "table_cloths", - "level2": "green_table_cloths", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null } 
], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "on the green table,place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." + "operation_platform_height": null, + "frame_range": "0-170337", + "dataset_size": "6.4GB", + "statistics": { + "total_episodes": 499, + "total_frames": 170337, + "total_tasks": 6, + "total_videos": 1497, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "820026e7-2878-4127-b935-c5e584a825bc", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" ], "sub_tasks": [ + "use the right arm to put the chopsticks into the lunch box", + "Abnormal", + "use the left arm to grab a pair of chopsticks", + "End", + "Grasp the a pair of chopsticks with the left gripper", + "Place the a pair of chopsticks in the box with the left gripper", + "use the left arm to put the chopsticks into the lunch box", + "use the right arm to grab a pair of chopsticks", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced 
Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_box_storage_chopsticks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ 
├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_box_storage_chopsticks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_peach_storage": { + "path": "R1_Lite_peach_storage", + "dataset_name": "peach_storage", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "End", + "objects": [ { - "subtask": "Grasp the round bread and put it in the right basket", - "subtask_index": 0 - }, - { - "subtask": "Grasp 
the can and put it in the right basket", - "subtask_index": 1 - }, - { - "subtask": "Pick up the compass and put it in the left basket", - "subtask_index": 2 - }, - { - "subtask": "Grasp the soft cleanser and put it in the left basket", - "subtask_index": 3 - }, - { - "subtask": "Pick up the orange and put it in the right basket", - "subtask_index": 4 - }, - { - "subtask": "Pick up the round chewing gum and put it in the right basket", - "subtask_index": 5 - }, - { - "subtask": "Grasp the playing cards and put it in the left basket", - "subtask_index": 6 - }, - { - "subtask": "Grasp the purple garbage bag and put it in the left basket", - "subtask_index": 7 - }, - { - "subtask": "Place the back scratcher in the center of the table", - "subtask_index": 8 - }, - { - "subtask": "Place the eggplant in the center of the table", - "subtask_index": 9 - }, - { - "subtask": "Pick up the square chewing gum and put it in the right basket", - "subtask_index": 10 - }, - { - "subtask": "Pick up the washing liquid and put it in the left basket", - "subtask_index": 11 - }, - { - "subtask": "Grasp the red pot and put it in the left basket", - "subtask_index": 12 - }, - { - "subtask": "Grasp the grape and put it in the right basket", - "subtask_index": 13 - }, - { - "subtask": "Pick up the tape and put it in the left basket", - "subtask_index": 14 - }, - { - "subtask": "Pick up the shampoo and put it in the left basket", - "subtask_index": 15 - }, - { - "subtask": "Pick up the playing cards and put it in the left basket", - "subtask_index": 16 - }, - { - "subtask": "Place the orange in the center of the table", - "subtask_index": 17 - }, - { - "subtask": "Grasp the eggplant and put it in the right basket", - "subtask_index": 18 - }, - { - "subtask": "Pick up the banana and put it in the right basket", - "subtask_index": 19 - }, - { - "subtask": "Grasp the washing liquid and put it in the left basket", - "subtask_index": 20 - }, - { - "subtask": "Grasp the shampoo and put it in the 
left basket", - "subtask_index": 21 - }, - { - "subtask": "Place the pink bowl in the center of the table", - "subtask_index": 22 - }, - { - "subtask": "Place the compass in the center of the table", - "subtask_index": 23 - }, - { - "subtask": "Place the peach in the center of the table", - "subtask_index": 24 - }, - { - "subtask": "Place the banana in the center of the table", - "subtask_index": 25 - }, - { - "subtask": "Place the tape in the center of the table", - "subtask_index": 26 - }, - { - "subtask": "Pick up the round bread and put it in the left basket", - "subtask_index": 27 - }, - { - "subtask": "Pick up the tea cup and put it in the left basket", - "subtask_index": 28 - }, - { - "subtask": "Pick up the round bread and put it in the right basket", - "subtask_index": 29 - }, - { - "subtask": "Grasp the banana and put it in the right basket", - "subtask_index": 30 - }, - { - "subtask": "Grasp the pear and put it in the right basket", - "subtask_index": 31 - }, - { - "subtask": "Place the lemon and put it in the center of the table", - "subtask_index": 32 - }, - { - "subtask": "Grasp the compass and put it in the right basket", - "subtask_index": 33 - }, - { - "subtask": "Pick up the can and put it in the right basket", - "subtask_index": 34 - }, - { - "subtask": "Place the red pot in the center of the table", - "subtask_index": 35 - }, - { - "subtask": "Pick up the yellow marker and put it in the left basket", - "subtask_index": 36 - }, - { - "subtask": "Grasp the yellow cake and put it in the right basket", - "subtask_index": 37 - }, - { - "subtask": "Grasp the long bread and put it in the right basket", - "subtask_index": 38 - }, - { - "subtask": "Grasp the canned cola and put it in the right basket", - "subtask_index": 39 - }, - { - "subtask": "Grasp the glasses case and put it in the left basket", - "subtask_index": 40 - }, - { - "subtask": "Pick up the lime and put it in the right basket", - "subtask_index": 41 - }, - { - "subtask": "Pick up the pear 
and put it in the right basket", - "subtask_index": 42 - }, - { - "subtask": "Grasp the green chewing gum and put it in the right basket", - "subtask_index": 43 - }, - { - "subtask": "Pick up the croissant and put it in the right basket", - "subtask_index": 44 - }, - { - "subtask": "Pick up the chinese cabbage and put it in the right basket", - "subtask_index": 45 - }, - { - "subtask": "Place the fruit fudge in the center of the table", - "subtask_index": 46 - }, - { - "subtask": "Grasp the white eraser and put it in the left basket", - "subtask_index": 47 - }, - { - "subtask": "Place the yellow marker in the center of the table", - "subtask_index": 48 - }, - { - "subtask": "Place the round chewing gum in the center of the table", - "subtask_index": 49 - }, - { - "subtask": "Pick up the canned cola and put it in the right basket", - "subtask_index": 50 - }, - { - "subtask": "Pick up the red pot and put it in the left basket", - "subtask_index": 51 - }, - { - "subtask": "Place the green lemon in the center of the table", - "subtask_index": 52 - }, - { - "subtask": "Grasp the pink marker and put it in the left basket", - "subtask_index": 53 - }, - { - "subtask": "Place the washing liquid in the center of the table", - "subtask_index": 54 - }, - { - "subtask": "Abnormal", - "subtask_index": 55 - }, - { - "subtask": "Grasp the peach and put it in the right basket", - "subtask_index": 56 - }, - { - "subtask": "Grasp the soap and put it in the left basket", - "subtask_index": 57 - }, - { - "subtask": "Place the Red Bull in the center of the table", - "subtask_index": 58 - }, - { - "subtask": "Place the mango in the center of the table", - "subtask_index": 59 - }, - { - "subtask": "Grasp the fruit fudge and put it in the right basket", - "subtask_index": 60 - }, - { - "subtask": "Grasp the bath ball and put it in the left basket", - "subtask_index": 61 - }, - { - "subtask": "Grasp the red bull and put it in the right basket", - "subtask_index": 62 - }, - { - "subtask": 
"Pick up the lemon and put it in the right basket", - "subtask_index": 63 - }, - { - "subtask": "Grasp the chinese cabbage and put it in the right basket", - "subtask_index": 64 - }, - { - "subtask": "Grasp the pink pot and put it in the left basket", - "subtask_index": 65 - }, - { - "subtask": "Grasp the croissant and put it in the right basket", - "subtask_index": 66 - }, - { - "subtask": "Grasp the soda water and put it in the right basket", - "subtask_index": 67 - }, - { - "subtask": "Pick up the egg yolk pastry and put it in the right basket", - "subtask_index": 68 - }, - { - "subtask": "Grasp the spoon and put it in the left basket", - "subtask_index": 69 - }, - { - "subtask": "Grasp the blue cup and put it in the left basket", - "subtask_index": 70 - }, - { - "subtask": "Place the blue towel in the center of the table", - "subtask_index": 71 - }, - { - "subtask": "Place the yellow cake in the center of the table", - "subtask_index": 72 - }, - { - "subtask": "Place the grey cup in the center of the table", - "subtask_index": 73 - }, - { - "subtask": "Grasp the tape and put it in the left basket", - "subtask_index": 74 - }, - { - "subtask": "Place the square chewing gum in the center of the table", - "subtask_index": 75 - }, - { - "subtask": "Pick up the milk and put it in the right basket", - "subtask_index": 76 - }, - { - "subtask": "Grasp the back scratcher and put it in the right basket", - "subtask_index": 77 - }, - { - "subtask": "Grasp the yogurt and put it in the right basket", - "subtask_index": 78 - }, - { - "subtask": "Pick up the yellow cake and put it in the right basket", - "subtask_index": 79 - }, - { - "subtask": "Pick up the blue bowl and put it in the left basket", - "subtask_index": 80 - }, - { - "subtask": "Grasp the lemon and put it in the right basket", - "subtask_index": 81 - }, - { - "subtask": "Grasp the back scratcher and put it in the left basket", - "subtask_index": 82 - }, - { - "subtask": "Grasp the orange and put it in the right 
basket", - "subtask_index": 83 - }, - { - "subtask": "Pick up the soda water and put it in the right basket", - "subtask_index": 84 - }, - { - "subtask": "Grasp the red pot and put it in the right basket", - "subtask_index": 85 - }, - { - "subtask": "Grasp the milk and put it in the right basket", - "subtask_index": 86 - }, - { - "subtask": "Pick up the blue cup and put it in the left basket", - "subtask_index": 87 - }, - { - "subtask": "Pick up the mango and put it in the right basket", - "subtask_index": 88 - }, - { - "subtask": "Place the pink marker in the center of the table", - "subtask_index": 89 - }, - { - "subtask": "Grasp the grey cup and put it in the left basket", - "subtask_index": 90 - }, - { - "subtask": "Pick up the long bread and put it in the right basket", - "subtask_index": 91 - }, - { - "subtask": "Pick up the soap and put it in the left basket", - "subtask_index": 92 - }, - { - "subtask": "Grasp the peach and put it in the left basket", - "subtask_index": 93 - }, - { - "subtask": "Pick up the pink pot and put it in the left basket", - "subtask_index": 94 - }, - { - "subtask": "Grasp the sausage and put it in the right basket", - "subtask_index": 95 - }, - { - "subtask": "Pick up the yogurt and put it in the right basket", - "subtask_index": 96 - }, - { - "subtask": "Place the bath ball in the center of the table", - "subtask_index": 97 - }, - { - "subtask": "Grasp the square chewing gum and put it in the right basket", - "subtask_index": 98 - }, - { - "subtask": "Grasp the ad milk and put it in the right basket", - "subtask_index": 99 - }, - { - "subtask": "Place the pink pot in the center of the table", - "subtask_index": 100 - }, - { - "subtask": "Grasp the compass and put it in the left basket", - "subtask_index": 101 - }, - { - "subtask": "Place the glasses case in the center of the table", - "subtask_index": 102 - }, - { - "subtask": "Place the gray towel in the center of the table", - "subtask_index": 103 - }, - { - "subtask": "Pick up 
the back scratcher and put it in the left basket", - "subtask_index": 104 - }, - { - "subtask": "Place the purple garbage bag in the center of the table", - "subtask_index": 105 - }, - { - "subtask": "Place the blue cup in the center of the table", - "subtask_index": 106 - }, - { - "subtask": "Pick up the blue towel and put it in the left basket", - "subtask_index": 107 - }, - { - "subtask": "Place the pear in the center of the table", - "subtask_index": 108 - }, - { - "subtask": "Place the soft cleanser in the center of the table", - "subtask_index": 109 - }, - { - "subtask": "Grasp the egg yolk pastry and put it in the right basket", - "subtask_index": 110 - }, - { - "subtask": "Pick up the spoon and put it in the left basket", - "subtask_index": 111 - }, - { - "subtask": "Place the sausage in the center of the table", - "subtask_index": 112 - }, - { - "subtask": "Place the green chewing gum in the center of the table", - "subtask_index": 113 - }, - { - "subtask": "Pick up the peach and put it in the right basket", - "subtask_index": 114 - }, - { - "subtask": "Place the can in the center of the table", - "subtask_index": 115 - }, - { - "subtask": "Grasp the mango and put it in the right basket", - "subtask_index": 116 - }, - { - "subtask": "Place the canned cola in the center of the table", - "subtask_index": 117 - }, - { - "subtask": "Pick up the gray towel and put it in the left basket", - "subtask_index": 118 - }, - { - "subtask": "Place the ad milk in the center of the table", - "subtask_index": 119 - }, - { - "subtask": "Pick up the sausage and put it in the right basket", - "subtask_index": 120 - }, - { - "subtask": "Grasp the hard cleanser and put it in the left basket", - "subtask_index": 121 - }, - { - "subtask": "Pick up the pink marker and put it in the left basket", - "subtask_index": 122 - }, - { - "subtask": "Grasp the detergent and put it in the left basket", - "subtask_index": 123 - }, - { - "subtask": "Pick up the pink bowl and put it in the left 
basket", - "subtask_index": 124 - }, - { - "subtask": "Grasp the Red Bull and put it in the right basket", - "subtask_index": 125 - }, - { - "subtask": "Pick up the ad milk and put it in the right basket", - "subtask_index": 126 - }, - { - "subtask": "Place the long bread in the center of the table", - "subtask_index": 127 - }, - { - "subtask": "Pick up the soft cleanser and put it in the left basket", - "subtask_index": 128 - }, - { - "subtask": "Place the croissant in the center of the table", - "subtask_index": 129 - }, - { - "subtask": "Grasp the lime and put it in the right basket", - "subtask_index": 130 - }, - { - "subtask": "Place the round bread in the center of the table", - "subtask_index": 131 - }, - { - "subtask": "Pick up the green chewing gum and put it in the right basket", - "subtask_index": 132 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the pink bowl and put it in the left basket", - "subtask_index": 133 + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the playing cards in the center of the table", - "subtask_index": 134 - }, + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-31755", + "dataset_size": "1.5GB", + "statistics": { + "total_episodes": 110, + "total_frames": 31755, + "total_tasks": 1, + "total_videos": 440, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "e596c797-57d3-4fbb-ba99-5dec78a4c0b2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Place the light basket on the middle of the table with the left gripper", + "Grasp the light basket with the left gripper", + "Static", + "Grasp the peach with the right 
gripper", + "Place the peach into the light basket with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, 
Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_peach_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_peach_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Galaxea_R1_Lite_pour_solid_marble_bar_counter": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ { - "subtask": "Pick up the glasses case and put it in the left basket", - "subtask_index": 135 + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_pour_solid_marble_bar_counter", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "househhold", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "subtask": "Pick up the fruit fudge and put it in the right basket", - "subtask_index": 136 + "object_name": "marble_bar_counter", + "level1": "furniture", + "level2": "marble_bar_counter", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the eggplant and put it in the right basket", - "subtask_index": 137 + "object_name": "plastic_cup", + "level1": "cups", + "level2": "plastic_cup", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the white eraser in the center of the table", - "subtask_index": 138 + "object_name": "green_dish", + "level1": "plates", + "level2": "green_dish", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the yogurt in the center of the table", - "subtask_index": 139 + "object_name": "pink_bowl", + "level1": "plastic_bowls", + "level2": "pink_bowl", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pick up the detergent and put it in the left basket", - "subtask_index": 140 - }, + "object_name": "solid", + "level1": "materials", + "level2": "solid", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset 
temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick up the cup and pour the solid into a bowl or tray." + ], + "sub_tasks": [ { - "subtask": "Grasp the blue towel and put it in the left basket", - "subtask_index": 141 + "subtask": "Abnormal", + "subtask_index": 0 }, { - "subtask": "Pick up the red bull and put it in the right basket", - "subtask_index": 142 + "subtask": "Grasp the glass of shrimp with the left gripper", + "subtask_index": 1 }, { - "subtask": "Place the lime in the center of the table", - "subtask_index": 143 + "subtask": "Pour the shrimp beans into the green bowl with the left gripper", + "subtask_index": 2 }, { - "subtask": "Grasp the mint candy and put it in the right basket", - "subtask_index": 144 + "subtask": "Pour the shrimp beans into the green bowl with the right gripper", + "subtask_index": 3 }, { - "subtask": "Place the soap in the center of the table", - "subtask_index": 145 + "subtask": "Grasp the glass of coffee beans with the right gripper", + "subtask_index": 4 }, { - "subtask": "Grasp the red marker and put it in the left basket", - "subtask_index": 146 + "subtask": "Grasp the glass of coffee beans with the left gripper", + "subtask_index": 5 }, { - "subtask": "Pick up the purple garbage bag and put it in the left basket", - "subtask_index": 147 + "subtask": "Place the glass cup with the right gripper", + "subtask_index": 6 }, { - "subtask": "Place the detergent in the center of the table", - "subtask_index": 148 + "subtask": "Pour the shrimp into the pink bowl with the left gripper", + "subtask_index": 7 }, { - "subtask": "Pick up the bath ball and put it in the left basket", - "subtask_index": 149 + "subtask": "End", + "subtask_index": 8 }, { - "subtask": "Grasp the gray towel and put it in the left basket", - "subtask_index": 150 + "subtask": "Pour the coffee beans into the green bowl with the right gripper", + "subtask_index": 9 }, { - "subtask": "Grasp the yellow marker 
and put it in the left basket", - "subtask_index": 151 + "subtask": "Pour the coffee beans into the pink bowl with the left gripper", + "subtask_index": 10 }, { - "subtask": "Grasp the pink towel and put it in the left basket", - "subtask_index": 152 + "subtask": "Pour the shrimp into the pink bowl with the right gripper", + "subtask_index": 11 }, { - "subtask": "End", - "subtask_index": 153 + "subtask": "Pour the coffee beans into the pink bowl with the right gripper", + "subtask_index": 12 }, { - "subtask": "Place the shampoo in the center of the table", - "subtask_index": 154 + "subtask": "Grasp the glass of shrimp with the right gripper", + "subtask_index": 13 }, { - "subtask": "Grasp the green lemon and put it in the right basket", - "subtask_index": 155 + "subtask": "Place the glass cup with the left gripper", + "subtask_index": 14 }, { - "subtask": "Grasp the round chewing gum and put it in the right basket", - "subtask_index": 156 + "subtask": "Pour the coffee beans into the green bowl with the left gripper", + "subtask_index": 15 }, { "subtask": "null", - "subtask_index": 157 + "subtask_index": 16 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pour" ], "robot_name": [ "Galaxea_R1_Lite" @@ -101603,23 +110040,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 224, - "total_frames": 251221, + "total_episodes": 85, + "total_frames": 31250, "fps": 30, - "total_tasks": 158, - "total_videos": 896, + "total_tasks": 17, + "total_videos": 340, "total_chunks": 1, "chunks_size": 1000, "state_dim": 14, "action_dim": 14, "camera_views": 4, - "dataset_size": "25.62 GB" + "dataset_size": "1.29 GB" }, - "frame_num": 251221, - "dataset_size": "25.62 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_classify_object_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (212 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 31250, + "dataset_size": "1.29 GB", + "data_structure": "Galaxea_R1_Lite_pour_solid_marble_bar_counter_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(73 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:223" + "train": "0:84" }, "features": { "observation.images.cam_head_left_rgb": { @@ -101989,110 +110426,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "AgiBot-g1_robotic_arm_picks_up_parts": { - "path": "AgiBot-g1_robotic_arm_picks_up_parts", - "dataset_name": "robotic_arm_picks_up_parts", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the hard drive bracket into the box.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "black_container", - "level1": "container", - "level2": "black_container", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-30190", - "dataset_size": "15.0GB", - "statistics": { - "total_episodes": 53, - "total_frames": 30190, - "total_tasks": 1, - "total_videos": 424, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "9d81f45b-a37d-46ad-b5cd-899737518fb6", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the hard drive bracket into the box.", - "Pick up the hard drive bracket from the feeding area.", - 
"null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai 
Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_robotic_arm_picks_up_parts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_robotic_arm_picks_up_parts_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_move_medicine_bottle": { + "Airbot_MMK2_take_BBs_block": { "task_categories": [ "robotics" ], @@ -102122,11 +110456,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_medicine_bottle", + "dataset_name": "Airbot_MMK2_take_BBs_block", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "medical_healthcare", - "level2": "pharmacy", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -102134,25 +110468,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "medicine_bottle", - "level1": "container", - "level2": "medicine_bottle", + "object_name": "rectangular_building_blocks", + "level1": "building_blocks", + "level2": "rectangular_building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "board", - "level1": "material", - "level2": "board", + "object_name": "bb_pellets", + "level1": "ball", + "level2": "bb_pellets", "level3": null, "level4": null, "level5": null @@ -102160,40 +110486,36 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place the medicine bottles on the table onto the 
board with both hands." + "take the rectangular building blocks and bullets out of the plate by hand and place them on the table." ], "sub_tasks": [ { - "subtask": "Place the white bottle on the white board with the right gripper", + "subtask": "Grasp the green rectangular block on the plate with the left gripper", "subtask_index": 0 }, { - "subtask": "Abnormal", + "subtask": "Grasp the bullet on the plate with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the white bottle with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Abnormal", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the bullet on the table with the right gripper", "subtask_index": 4 }, { - "subtask": "Grasp the white bottle the right gripper", + "subtask": "Place the green rectangular block on the table with the left gripper", "subtask_index": 5 }, - { - "subtask": "Place the white bottle on the white board with the left gripper", - "subtask_index": 6 - }, { "subtask": "null", - "subtask_index": 7 + "subtask_index": 6 } ], "atomic_actions": [ @@ -102233,23 +110555,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 16815, + "total_episodes": 43, + "total_frames": 13927, "fps": 30, - "total_tasks": 8, - "total_videos": 200, + "total_tasks": 7, + "total_videos": 172, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "493.20 MB" + "dataset_size": "449.13 MB" }, - "frame_num": 16815, - "dataset_size": "493.20 MB", - "data_structure": "Airbot_MMK2_move_medicine_bottle_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- 
episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 13927, + "dataset_size": "449.13 MB", + "data_structure": "Airbot_MMK2_take_BBs_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:42" }, "features": { "observation.images.cam_head_rgb": { @@ -102603,7 +110925,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_mix_color": { + "Airbot_MMK2_close_door_right": { "task_categories": [ "robotics" ], @@ -102633,11 +110955,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_color", + "dataset_name": "Airbot_MMK2_close_door_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "bedroom", "level3": null, "level4": null, "level5": null @@ -102645,49 +110967,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "white_pigment", - "level1": "materials", - "level2": "white_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "large_test_tubes", - "level1": "laboratory_supplies", - "level2": "large_test_tubes", - "level3": null, - "level4": null, - 
"level5": null - }, - { - "object_name": "beaker", - "level1": "laboratory_supplies", - "level2": "holding_utensils", + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", "level3": null, "level4": null, "level5": null @@ -102695,96 +110977,45 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the test tube with red pigment the test tube with blue pigment and the test tube with white pigment by grippers and pour them into the beaker." + "close the cabinet door with your right hand." ], "sub_tasks": [ { - "subtask": "Pour the red reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Pour the orange reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask": "Close the cupboard door with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the red reagent with the left gripper", + "subtask": "Touch the door with the right gripper", "subtask_index": 2 }, - { - "subtask": "Pour the white reagent into the graduated cylinder with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Grasp the red reagent with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Pour the blue reagent into the graduated cylinder and place the test tube into the paper cup", - "subtask_index": 5 - }, - { - "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "End", - "subtask_index": 8 - }, - { - "subtask": "Place the test tube into the paper cup with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the test tube into the paper cup with the left gripper", - "subtask_index": 10 - }, - { - "subtask": 
"Grasp the blue reagent with the left gripper", - "subtask_index": 11 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the white reagent with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "Pour the white reagent into the graduated cylinder and place the test tube into the paper cup", - "subtask_index": 14 - }, { "subtask": "null", - "subtask_index": 15 + "subtask_index": 3 } ], "atomic_actions": [ - "grasp", - "pick", - "place", - "pour" + "push" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -102801,30 +111032,30 @@ "subtask_annotations.jsonl" ], "statistics": { - 
"total_episodes": 50, - "total_frames": 79584, + "total_episodes": 47, + "total_frames": 5437, "fps": 30, - "total_tasks": 16, - "total_videos": 200, + "total_tasks": 4, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "2.99 GB" + "dataset_size": "148.62 MB" }, - "frame_num": 79584, - "dataset_size": "2.99 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 5437, + "dataset_size": "148.62 MB", + "data_structure": "Airbot_MMK2_close_door_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:46" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -102833,8 +111064,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -102843,11 +111074,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -102856,8 +111087,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -102866,10 +111097,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -102879,7 +111110,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -102889,10 +111120,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -102902,7 +111133,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -102915,7 +111146,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], 
"names": [ "left_arm_joint_1_rad", @@ -102930,14 +111161,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -102952,8 +111205,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -103104,66 +111379,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -103185,22 +111400,20 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AIRBOT_MMK2_organize_books": { - "path": "AIRBOT_MMK2_organize_books", - "dataset_name": "organize_books", + "Cobot_Magic_turn_on_the_bulb": { + "path": "Cobot_Magic_turn_on_the_bulb", + "dataset_name": "turn_on_the_bulb", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", - "pick", - "place" + "press" ], - "tasks": "Static", + "tasks": "End", "objects": [ { "object_name": "table", @@ -103211,35 +111424,27 @@ "level5": null }, { - "object_name": "book", - "level1": "office_supplies", - "level2": "book", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bookshelf", - "level1": "container", - "level2": "bookshelf", + "object_name": "bulb", + "level1": "tool", + "level2": "bulb", "level3": null, "level4": null, "level5": null } ], 
"operation_platform_height": 77.2, - "frame_range": "0-9576", - "dataset_size": "439.6MB", + "frame_range": "0-2216", + "dataset_size": "52.8MB", "statistics": { - "total_episodes": 50, - "total_frames": 9576, + "total_episodes": 11, + "total_frames": 2216, "total_tasks": 1, - "total_videos": 200, + "total_videos": 33, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "b36aed44-2805-4713-8d48-a14bebf79671", + "dataset_uuid": "7bd56e56-b106-4062-8952-e5bffc346b4c", "language": [ "en", "zh" @@ -103248,10 +111453,8 @@ "robotics" ], "sub_tasks": [ - "Static", - "Grasp the yellow book with the right gripper", - "Place the yellow book into the front mezzanine of the bookshelf with the right gripper", "End", + "Turn on the switch", "null" ], "annotations": { @@ -103289,22 +111492,21 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and 
Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_organize_books_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_organize_books_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_turn_on_the_bulb_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_turn_on_the_bulb_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "RMC-AIDA-L_fold_towel": { - "path": "RMC-AIDA-L_fold_towel", - "dataset_name": "fold_towel", + "R1_Lite_move_the_position_of_the_apple": { + "path": "R1_Lite_move_the_position_of_the_apple", + "dataset_name": "move_the_position_of_the_apple", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", "place", "pick", - "unfold" + "grasp" ], "tasks": "End", "objects": [ @@ -103316,6 +111518,158 @@ "level4": null, "level5": null }, + { + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + 
"level1": "fruit", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "container", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + 
}, + { + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, { "object_name": "towel", "level1": "clothing", @@ -103323,254 +111677,205 @@ "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-139069", - "dataset_size": "1.5GB", - "statistics": { - "total_episodes": 314, - "total_frames": 139069, - "total_tasks": 2, - "total_videos": 942, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "189eaf6e-2609-4e71-8e6e-e6393a5d259e", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Press and secure one corner of the towel with the right arm", - "Unfold the towel with the left arm", - "Press and secure one corner of the towel with the left arm", - "Press and secure one corner of the towel with the right gripper", - "Unfold the towel with the left gripper", - "Unfold the towel with the right arm", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - 
"RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_fold_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_fold_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_box_storage_chopsticks": { - "path": "Cobot_Magic_box_storage_chopsticks", - "dataset_name": "box_storage_chopsticks", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "use the right arm 
to put the chopsticks into the lunch box", - "objects": [ + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null + }, { - "object_name": 
"table", - "level1": "furniture", - "level2": "table", + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", "level3": null, "level4": null, "level5": null }, { - "object_name": "pallet", - "level1": "container", - "level2": "pallet", + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", "level3": null, "level4": null, "level5": null }, { - "object_name": "chopsticks", - "level1": "tableware", - "level2": "chopsticks", + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", "level3": null, "level4": null, "level5": null }, { - "object_name": "box", - "level1": "container", - "level2": "box", + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-170337", - "dataset_size": "6.4GB", - "statistics": { - "total_episodes": 499, - "total_frames": 170337, - "total_tasks": 6, - "total_videos": 1497, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "820026e7-2878-4127-b935-c5e584a825bc", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "use the right arm to put the chopsticks into the lunch box", - "Abnormal", - "use the left arm to grab a pair of chopsticks", - "End", - "Grasp the a pair of chopsticks with the left gripper", - "Place the a pair of chopsticks in the box with the left gripper", - "use the left arm to put the chopsticks into the lunch box", - "use the right arm to grab a pair of chopsticks", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": 
"auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - 
"depth_enabled": false, - "data_schema": "Cobot_Magic_box_storage_chopsticks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_box_storage_chopsticks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_peach_storage": { - "path": "R1_Lite_peach_storage", - "dataset_name": "peach_storage", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "End", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "chips", + "level1": "food", + "level2": "chips", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", - "level1": "container", - "level2": "basket", + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", "level3": null, "level4": null, "level5": null }, { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-31755", - "dataset_size": "1.5GB", + "frame_range": "0-5959", + "dataset_size": "223.0MB", "statistics": { - "total_episodes": 110, - "total_frames": 31755, + "total_episodes": 35, + "total_frames": 5959, "total_tasks": 1, - "total_videos": 440, + "total_videos": 140, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "e596c797-57d3-4fbb-ba99-5dec78a4c0b2", + "dataset_uuid": "9155fbd7-8bc2-44d0-a2dc-16b1fad44af0", "language": [ "en", "zh" @@ -103580,11 +111885,12 @@ ], "sub_tasks": [ "End", - "Place the light basket on the middle of the 
table with the left gripper", - "Grasp the light basket with the left gripper", + "Place the apple on the table with left gripper", + "Grasp the apple with left gripper", + "Place the apple on the table with right gripper", + "Grasp the apple with right gripper", "Static", - "Grasp the peach with the right gripper", - "Place the peach into the light basket with the right gripper", + "Abnormal", "null" ], "annotations": { @@ -103622,10 +111928,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_peach_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ 
├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_peach_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_move_the_position_of_the_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_pour_solid_marble_bar_counter": { + "Airbot_MMK2_take_block": { "task_categories": [ "robotics" ], @@ -103655,11 +111961,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_pour_solid_marble_bar_counter", + "dataset_name": "Airbot_MMK2_take_block", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "househhold", - "level2": "kitchen", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -103667,41 +111973,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "marble_bar_counter", - "level1": "furniture", - "level2": 
"marble_bar_counter", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_cup", - "level1": "cups", - "level2": "plastic_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_dish", - "level1": "plates", - "level2": "green_dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pink_bowl", - "level1": "plastic_bowls", - "level2": "pink_bowl", + "object_name": "building_blocks", + "level1": "toys", + "level2": "building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "solid", - "level1": "materials", - "level2": "solid", + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -103709,100 +111991,55 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a gripper to pick up the cup and pour the solid into a bowl or tray." + "take the building blocks off the white plate by hands." 
], "sub_tasks": [ { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Grasp the glass of shrimp with the left gripper", + "subtask": "Place red rectangular block on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Pour the shrimp beans into the green bowl with the left gripper", + "subtask": "Grasp the red rectangular block on the plate with the left gripper", "subtask_index": 2 }, { - "subtask": "Pour the shrimp beans into the green bowl with the right gripper", + "subtask": "Grasp the green rectangular block on the plate with the right gripper", "subtask_index": 3 }, { - "subtask": "Grasp the glass of coffee beans with the right gripper", + "subtask": "Place green rectangular block on the table with the right gripper", "subtask_index": 4 }, - { - "subtask": "Grasp the glass of coffee beans with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Place the glass cup with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Pour the shrimp into the pink bowl with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "End", - "subtask_index": 8 - }, - { - "subtask": "Pour the coffee beans into the green bowl with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Pour the coffee beans into the pink bowl with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Pour the shrimp into the pink bowl with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Pour the coffee beans into the pink bowl with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the glass of shrimp with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "Place the glass cup with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Pour the coffee beans into the green bowl with the left gripper", - "subtask_index": 15 - }, { "subtask": "null", - "subtask_index": 16 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", "pick", - 
"place", - "pour" + "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -103819,30 +112056,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 85, - "total_frames": 31250, + "total_episodes": 47, + "total_frames": 10270, "fps": 30, - "total_tasks": 17, - "total_videos": 340, + "total_tasks": 6, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "1.29 GB" + "dataset_size": "334.13 MB" }, - "frame_num": 31250, - "dataset_size": "1.29 GB", - "data_structure": "Galaxea_R1_Lite_pour_solid_marble_bar_counter_qced_hardlink/\n|-- annotations\n| 
|-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (73 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 10270, + "dataset_size": "334.13 MB", + "data_structure": "Airbot_MMK2_take_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:84" + "train": "0:46" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -103851,8 +112088,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -103861,11 +112098,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -103874,8 +112111,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -103884,10 +112121,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -103897,7 +112134,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -103907,10 +112144,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -103920,7 +112157,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -103933,7 +112170,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], 
"names": [ "left_arm_joint_1_rad", @@ -103948,14 +112185,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -103970,8 +112229,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -104122,66 +112403,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -104203,9 +112424,104 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_take_BBs_block": { + "G1edu-u3_pick_up_the_toy_ai": { + "path": "G1edu-u3_pick_up_the_toy_ai", + "dataset_name": "pick_up_the_toy_ai", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toy", + "level1": "toy", + "level2": "toy", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-3242", + "dataset_size": "44.8MB", + "statistics": { + "total_episodes": 10, + "total_frames": 3242, + "total_tasks": 1, + "total_videos": 10, + "total_chunks": 1, + "chunks_size": 10, + "fps": 30 + }, + "dataset_uuid": 
"804260a5-c07f-432e-9080-8112b837464a", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the toy bear and lift it to the center of the view with right gripper", + "Grasp the toy bear and lift it to the center of the view with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, 
Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_pick_up_the_toy_ai_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_up_the_toy_ai_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + 
}, + "Airbot_MMK2_move_cake": { "task_categories": [ "robotics" ], @@ -104235,11 +112551,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_BBs_block", + "dataset_name": "Airbot_MMK2_move_cake", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -104247,17 +112563,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "rectangular_building_blocks", - "level1": "building_blocks", - "level2": "rectangular_building_blocks", + "object_name": "shelf", + "level1": "home_storage", + "level2": "shelf", "level3": null, "level4": null, "level5": null }, { - "object_name": "bb_pellets", - "level1": "ball", - "level2": "bb_pellets", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null @@ -104265,36 +112581,56 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the rectangular building blocks and bullets out of the plate by hand and place them on the table." + "place two cakes on the shelf with each hand respectively." 
], "sub_tasks": [ { - "subtask": "Grasp the green rectangular block on the plate with the left gripper", + "subtask": "Place the ice cream into the plate with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the bullet on the plate with the right gripper", + "subtask": "Place the cake onto the block toy with the right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Grasp the cake with the left gripper", "subtask_index": 2 }, { - "subtask": "Abnormal", + "subtask": "Place the cake on the yellow cube block with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the bullet on the table with the right gripper", + "subtask": "Place the cake on the blue cube block with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the green rectangular block on the table with the left gripper", + "subtask": "Place the cake onto the block toy with the left gripper", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "Grasp the cake with the right gripper", "subtask_index": 6 + }, + { + "subtask": "Static", + "subtask_index": 7 + }, + { + "subtask": "Grasp the cake from the table and with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the cake from the table and with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "End", + "subtask_index": 10 + }, + { + "subtask": "null", + "subtask_index": 11 } ], "atomic_actions": [ @@ -104334,23 +112670,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 43, - "total_frames": 13927, + "total_episodes": 94, + "total_frames": 22873, "fps": 30, - "total_tasks": 7, - "total_videos": 172, + "total_tasks": 12, + "total_videos": 376, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "449.13 MB" + "dataset_size": "1009.39 MB" }, - "frame_num": 13927, - "dataset_size": "449.13 MB", - "data_structure": "Airbot_MMK2_take_BBs_block_qced_hardlink/\n|-- annotations\n| |-- 
eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 22873, + "dataset_size": "1009.39 MB", + "data_structure": "Airbot_MMK2_move_cake_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(82 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:42" + "train": "0:93" }, "features": { "observation.images.cam_head_rgb": { @@ -104672,39 +113008,558 @@ ], "dtype": "int32" }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, 
Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "AIRBOT_MMK2_store_coffee_cups": { + "path": "AIRBOT_MMK2_store_coffee_cups", + "dataset_name": "store_coffee_cups", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Grasp the cup the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee_cup", + "level1": "container", + "level2": "coffee_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basin", + "level1": "container", + "level2": "basin", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-11166", + "dataset_size": "444.8MB", + "statistics": { + "total_episodes": 
50, + "total_frames": 11166, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "38759f00-3a88-419c-b74b-d8a66882a254", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the cup the right gripper", + "place the cup in the basin use the left gripper", + "End", + "Grasp the cup the left gripper", + "place the cup in the basin use the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing 
Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_store_coffee_cups_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"AIRBOT_MMK2_store_coffee_cups_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_hotel_services_e": { + "path": "leju_robot_hotel_services_e", + "dataset_name": "hotel_services_e", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Hand the room key to the person.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "card", + "level1": "nfc", + "level2": 
"card", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sensor", + "level1": "electronic_products", + "level2": "sensor", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-212885", + "dataset_size": "14.2GB", + "statistics": { + "total_episodes": 309, + "total_frames": 212885, + "total_tasks": 1, + "total_videos": 927, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4d57f5ee-7850-4fb4-8532-415c1821c756", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Hand the room key to the person.", + "Place the ID card on the card reader.", + "Take the ID card from the person's hand.", + "Pick up the room key from the key card box.", + "Hand the ID card to the person.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, 
Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_hotel_services_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AgiBot-g1_picks_up_battery_a": { + "path": "AgiBot-g1_picks_up_battery_a", + "dataset_name": "picks_up_battery_a", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp" + ], + "tasks": "Place the power supply on the operating table.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "carton", + "level1": "container", + "level2": "carton", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": 
"battery", + "level1": "tool", + "level2": "battery", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-214263", + "dataset_size": "119.7GB", + "statistics": { + "total_episodes": 562, + "total_frames": 214263, + "total_tasks": 1, + "total_videos": 4496, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "3385f559-eb6d-46ff-94d3-a6b48d250220", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the power supply on the operating table.", + "Grab and lift the power supply from the large box.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie 
Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_picks_up_battery_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_picks_up_battery_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AgiBot-g1_mobile_accessory_storage_box_a": { + "path": "AgiBot-g1_mobile_accessory_storage_box_a", + "dataset_name": "mobile_accessory_storage_box_a", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Place the open mouse box onto the operation table.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "accessories", + "level1": "electronic_products", + "level2": "accessories", + "level3": null, + "level4": null, + "level5": null + } + ], + 
"operation_platform_height": null, + "frame_range": "0-40631", + "dataset_size": "16.7GB", + "statistics": { + "total_episodes": 81, + "total_frames": 40631, + "total_tasks": 1, + "total_videos": 648, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "811a660c-6371-490a-9141-3019afa0cac2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the open mouse box onto the operation table.", + "Grab and pick up the open mouse box from the accessory packaging area.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo 
Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_mobile_accessory_storage_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_mobile_accessory_storage_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_pushing_magnet": { + "path": "Cobot_Magic_pushing_magnet", + "dataset_name": "pushing_magnet", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "push" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "magnet", + "level1": "tool", + "level2": "magnet", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-23553", + "dataset_size": "423.5MB", + "statistics": { + "total_episodes": 100, + "total_frames": 23553, + "total_tasks": 1, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "ad07d99e-6bee-4147-b81a-8f2dfe943ed8", + "language": [ + "en", + "zh" + ], + 
"task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Push the magnet on the right to connect the magnet on the left", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, 
Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang 
Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_pushing_magnet_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_pushing_magnet_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── 
episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_close_door_right": { + "Realman_RMC-AIDA-L_storage_peach_box": { "task_categories": [ "robotics" ], @@ -104734,11 +113589,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_close_door_right", + "dataset_name": "Realman_RMC-AIDA-L_storage_peach_box", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "bedroom", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -104746,9 +113601,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cabinet", + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_box", "level1": "home_storage", - "level2": "cabinet", + "level2": "black_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", "level3": null, "level4": null, "level5": null @@ -104756,51 +113627,59 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "close the cabinet door 
with your right hand." + "the left gripper open the box, the right gripper grasp the peach on the table and place it into the box." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Close the box with the left gripper", "subtask_index": 0 }, { - "subtask": "Close the cupboard door with the right gripper", + "subtask": "Open the lid with the left gripper", "subtask_index": 1 }, { - "subtask": "Touch the door with the right gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Grasp the peach with the right gripper", "subtask_index": 3 + }, + { + "subtask": "Place the peach into the box with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], "atomic_actions": [ - "push" + "grasp", + "pick", + "place" ], "robot_name": [ - "Airbot_MMK2" + "Realman_RMC-AIDA-L" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ 
"eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -104811,23 +113690,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 5437, + "total_episodes": 118, + "total_frames": 78472, "fps": 30, - "total_tasks": 4, - "total_videos": 188, + "total_tasks": 6, + "total_videos": 354, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "148.62 MB" + "state_dim": 28, + "action_dim": 28, + "camera_views": 3, + "dataset_size": "751.68 MB" }, - "frame_num": 5437, - "dataset_size": "148.62 MB", - "data_structure": "Airbot_MMK2_close_door_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 78472, + "dataset_size": "751.68 MB", + "data_structure": "Realman_RMC-AIDA-L_storage_peach_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(106 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:46" + "train": "0:117" }, "features": { "observation.images.cam_head_rgb": { @@ -104899,115 +113778,76 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 28 ], "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_arm_joint_7_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + 
"right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 28 ], "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_arm_joint_7_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + 
"left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -105158,6 +113998,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -105179,20 +114079,436 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_turn_on_the_bulb": { - "path": "Cobot_Magic_turn_on_the_bulb", - "dataset_name": "turn_on_the_bulb", + "leju_robot_moving_parts_b": { + "path": "leju_robot_moving_parts_b", + "dataset_name": "moving_parts_b", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Insert the small parts into the corresponding holes on the workbench.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-708011", + "dataset_size": "43.3GB", + "statistics": { + "total_episodes": 490, + "total_frames": 708011, + "total_tasks": 1, + "total_videos": 1470, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "b23f0a82-6d8f-4f98-b818-5e10125816cc", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Insert the small parts into the corresponding holes on the workbench.", + "End", + "Place the white part on the table with left gripper", + "Grasp the white part with left gripper", + "Move the small parts to the workbench.", + "Move to the table behind body", + "Take small components from the rack.", + "Pick up small parts from the shelf.", + "Move to the initial position of the workbench.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + 
"citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "G1edu-u3_place_plastic_bowl_ah": { + "path": "G1edu-u3_place_plastic_bowl_ah", + "dataset_name": "place_plastic_bowl_ah", + "robot_type": "", + "end_effector_type": [ + "three_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "pick", + "place" + ], + "tasks": "Place the plastic bowl on the table with right 
gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plastic_bowl", + "level1": "container", + "level2": "plastic_bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-5638", + "dataset_size": "83.7MB", + "statistics": { + "total_episodes": 33, + "total_frames": 5638, + "total_tasks": 1, + "total_videos": 33, + "total_chunks": 1, + "chunks_size": 34, + "fps": 30 + }, + "dataset_uuid": "7153928d-6c50-4662-91a7-9d48d64e1e5d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the plastic bowl on the table with right gripper", + "End", + "Place the plastic bowl on the table with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, 
Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "G1edu-u3_place_plastic_bowl_ah_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_place_plastic_bowl_ah_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AgiBot-g1_battery_storage_b": { + "path": "AgiBot-g1_battery_storage_b", + "dataset_name": "battery_storage_b", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "press" + "grasp", + "place", + "pick" + ], + "tasks": "Place the power supply on the operating table.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "battery", + "level1": "electronic_products", + "level2": "battery", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-57968", + "dataset_size": "29.6GB", + "statistics": { + "total_episodes": 115, + "total_frames": 57968, + "total_tasks": 1, + "total_videos": 920, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "632c0e2b-a8bb-4e12-9635-82df87be2096", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the power supply on the operating table.", + "Grab and lift the power supply from the large box.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": 
"auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + 
"depth_enabled": false, + "data_schema": "AgiBot-g1_battery_storage_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_battery_storage_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_throw_out_the_trash": { + "path": "R1_Lite_throw_out_the_trash", + "dataset_name": "throw_out_the_trash", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], "tasks": "End", + "objects": [ + { + "object_name": "trash_can", + "level1": "container", + "level2": "trash_can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "garbage_bag", + "level1": "container", + "level2": "garbage_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "the_door", + "level1": "furniture", + "level2": "the_door", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-143720", + "dataset_size": "6.0GB", + "statistics": { + "total_episodes": 109, + "total_frames": 143720, + "total_tasks": 1, + "total_videos": 327, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "1aceeebe-3221-4690-8d72-ca6f9ec512eb", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Place the outer trash can on the ground", + "Put the trash bag at the door", + "Pick up the trash bag", + "abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + 
"authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_throw_out_the_trash_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_throw_out_the_trash_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Cobot_Magic_the_box_stores_table_tennis_balls": { + "path": "Cobot_Magic_the_box_stores_table_tennis_balls", + "dataset_name": "the_box_stores_table_tennis_balls", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "use the right arm to clamp the table tennis ball closest to it", "objects": [ { "object_name": "table", @@ -105203,27 +114519,174 @@ "level5": null }, { - "object_name": "bulb", - "level1": "tool", - "level2": "bulb", + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table_tennis", + "level1": "toy", + "level2": "table_tennis", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-2216", - "dataset_size": "52.8MB", + "operation_platform_height": null, + "frame_range": "0-438756", + "dataset_size": "19.9GB", + "statistics": { + "total_episodes": 477, + "total_frames": 438756, + "total_tasks": 5, + "total_videos": 1431, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4af013a1-8691-4531-a11d-4a02b8ba9fdf", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "use the right arm to clamp the table tennis ball closest to it", + "use the right arm to put the table tennis ball into the box", + "use the left arm to clamp the table tennis ball closest to it", + "use the left arm to put the table tennis ball into the box", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + 
"scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_the_box_stores_table_tennis_balls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_the_box_stores_table_tennis_balls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_take_and_put_away_items": { + "path": "R1_Lite_take_and_put_away_items", + "dataset_name": "take_and_put_away_items", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pull", + "push" + ], + "tasks": "Take the mineral water out of the drawer and put it on the table", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "drawer", + "level1": "container", + "level2": "drawer", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "umbrella", + "level1": "daily_necessities", + "level2": "umbrella", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "data_cable", + "level1": "electronic_products", + "level2": "data_cable", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mineral_water", + "level1": "beverages", + "level2": "mineral_water", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "storage_box", + "level1": "container", + "level2": "storage_box", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-137069", + "dataset_size": "6.9GB", "statistics": { - "total_episodes": 11, - "total_frames": 2216, + "total_episodes": 112, + "total_frames": 137069, "total_tasks": 1, - "total_videos": 33, + 
"total_videos": 336, "total_chunks": 1, "chunks_size": 1000, - "fps": 50 + "fps": 30 }, - "dataset_uuid": "7bd56e56-b106-4062-8952-e5bffc346b4c", + "dataset_uuid": "fa3f7148-5f74-444e-82a5-9f8b31a69987", "language": [ "en", "zh" @@ -105232,8 +114695,16 @@ "robotics" ], "sub_tasks": [ - "End", - "Turn on the switch", + "Take the mineral water out of the drawer and put it on the table", + "Open the drawer", + "Take the data cable out of the drawer and put it on the table", + "Take the umbrella out of the drawer and put it on the table", + "Take the storage box out of the drawer and put it on the table", + "Put the storage box in the drawer", + "Close the drawer", + "Put the umbrella in the drawer", + "Put the data cable in the drawer", + "Put the mineral water in the drawer", "null" ], "annotations": { @@ -105271,23 +114742,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, 
Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_turn_on_the_bulb_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_turn_on_the_bulb_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n 
│ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_take_and_put_away_items_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_take_and_put_away_items_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_move_the_position_of_the_apple": { - "path": "R1_Lite_move_the_position_of_the_apple", - "dataset_name": "move_the_position_of_the_apple", + "RMC-AIDA-L_basket_storage_egg_yolk_pastry": { + "path": "RMC-AIDA-L_basket_storage_egg_yolk_pastry", + "dataset_name": "basket_storage_egg_yolk_pastry", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "place", + "grasp", "pick", - "grasp" + "place" ], - "tasks": "End", + "tasks": "Place the basket in the center of view with left gripper", "objects": [ { "object_name": "table", @@ -105297,126 +114768,6 @@ "level4": null, "level5": null }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - 
"level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, { "object_name": "basket", "level1": "container", @@ -105425,150 +114776,6 @@ "level4": null, "level5": null }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": 
"electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - 
"level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null - }, { "object_name": "egg_yolk_pastry", "level1": "food", @@ -105576,85 +114783,21 @@ "level3": null, "level4": null, "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-5959", - "dataset_size": "223.0MB", + "frame_range": "0-295062", + "dataset_size": "3.5GB", "statistics": { - "total_episodes": 35, - "total_frames": 5959, - "total_tasks": 1, - "total_videos": 140, + "total_episodes": 727, + "total_frames": 295062, + "total_tasks": 4, + "total_videos": 2181, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "9155fbd7-8bc2-44d0-a2dc-16b1fad44af0", + "dataset_uuid": "928d5a84-eae7-4fae-b272-9356c151aa6d", "language": [ "en", "zh" @@ -105663,13 +114806,23 @@ "robotics" ], 
"sub_tasks": [ + "Place the basket in the center of view with left gripper", + "Place the egg yolk pastry in the basket with right gripper", + "Move the basket to the center of view with the right gripper", + "Move the basket to the center of view with the left gripper", + "Pick up the egg yolk pastry with the right gripper", + "Place the basket in the center of view with right gripper", + "Pick up the basket with the left gripper", + "Place the basket in the center of view with the left gripper", + "Grasp egg yolk pastry with left gripper", + "Place the egg yolk pastry into the basket with the right gripper", + "Grasp the basket with right gripper", "End", - "Place the apple on the table with left gripper", - "Grasp the apple with left gripper", - "Place the apple on the table with right gripper", - "Grasp the apple with right gripper", - "Static", - "Abnormal", + "Grasp egg yolk pastry with right gripper", + "Place the egg yolk pastry in the basket with left gripper", + "Grasp the basket with left gripper", + "Place the egg yolk pastry into the basket with the left gripper", + "Pick up the egg yolk pastry with the left gripper", "null" ], "annotations": { @@ -105707,10 +114860,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, 
Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_apple_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "RMC-AIDA-L_basket_storage_egg_yolk_pastry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_basket_storage_egg_yolk_pastry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_take_block": { + "Airbot_MMK2_close_doors": { "task_categories": [ "robotics" ], @@ -105740,11 +114893,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_block", + "dataset_name": "Airbot_MMK2_close_doors", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": 
"study_room", + "level2": "studroom", "level3": null, "level4": null, "level5": null @@ -105752,17 +114905,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "building_blocks", - "level1": "toys", - "level2": "building_blocks", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", "level3": null, "level4": null, "level5": null @@ -105770,7 +114915,7 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the building blocks off the white plate by hands." + "close the door by hand." ], "sub_tasks": [ { @@ -105778,19 +114923,19 @@ "subtask_index": 0 }, { - "subtask": "Place red rectangular block on the table with the left gripper", + "subtask": "Touch the right cabinet door with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the red rectangular block on the plate with the left gripper", + "subtask": "Close the door with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the green rectangular block on the plate with the right gripper", + "subtask": "Touch the left cabinet door with the left gripper", "subtask_index": 3 }, { - "subtask": "Place green rectangular block on the table with the right gripper", + "subtask": "Close the door with the left gripper", "subtask_index": 4 }, { @@ -105799,9 +114944,7 @@ } ], "atomic_actions": [ - "grasp", - "pick", - "place" + "push" ], "robot_name": [ "Airbot_MMK2" @@ -105835,23 +114978,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 10270, + "total_episodes": 50, + "total_frames": 8141, "fps": 30, "total_tasks": 6, - "total_videos": 188, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, 
"state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "334.13 MB" + "dataset_size": "280.57 MB" }, - "frame_num": 10270, - "dataset_size": "334.13 MB", - "data_structure": "Airbot_MMK2_take_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 8141, + "dataset_size": "280.57 MB", + "data_structure": "Airbot_MMK2_close_doors_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:46" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -106205,102 +115348,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "G1edu-u3_pick_up_the_toy_ai": { - "path": "G1edu-u3_pick_up_the_toy_ai", - "dataset_name": "pick_up_the_toy_ai", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick" - ], - "tasks": "End", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "toy", - "level1": "toy", - "level2": "toy", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-3242", - "dataset_size": "44.8MB", - "statistics": { - "total_episodes": 10, - "total_frames": 3242, - "total_tasks": 1, - "total_videos": 10, - "total_chunks": 1, - "chunks_size": 10, - "fps": 30 - }, - "dataset_uuid": "804260a5-c07f-432e-9080-8112b837464a", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Grasp the toy bear and lift it to the center of the view with right gripper", - "Grasp the toy bear and lift it to the center of the view with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": 
"auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_pick_up_the_toy_ai_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_up_the_toy_ai_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_move_cake": { + "Airbot_MMK2_take_cake_both_hands": { "task_categories": [ "robotics" ], @@ -106330,11 +115378,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_cake", + "dataset_name": "Airbot_MMK2_take_cake_both_hands", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -106342,17 +115390,25 @@ "env_type": "Due to some reasons, this dataset 
temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "shelf", - "level1": "home_storage", - "level2": "shelf", + "object_name": "chocolate_cake", + "level1": "snacks", + "level2": "chocolate_cake", "level3": null, "level4": null, "level5": null }, { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "lid", + "level1": "storage_utensils", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null @@ -106360,56 +115416,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place two cakes on the shelf with each hand respectively." + "take the cake out of the basket with both hands and place it on the table." ], "sub_tasks": [ { - "subtask": "Place the ice cream into the plate with the right gripper", + "subtask": "Place the cake on the table with the right gripper", "subtask_index": 0 }, { - "subtask": "Place the cake onto the block toy with the right gripper", + "subtask": "Place the cake on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the cake with the left gripper", + "subtask": "Grasp the cake from the white basket with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the cake on the yellow cube block with the left gripper", + "subtask": "Grasp the cake from the white basket with the right gripper", "subtask_index": 3 }, - { - "subtask": "Place the cake on the blue cube block with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Place the cake onto the block toy with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the cake with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Static", - "subtask_index": 7 - }, - { - "subtask": "Grasp the cake from the table 
and with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the cake from the table and with the left gripper", - "subtask_index": 9 - }, { "subtask": "End", - "subtask_index": 10 + "subtask_index": 4 }, { "subtask": "null", - "subtask_index": 11 + "subtask_index": 5 } ], "atomic_actions": [ @@ -106449,23 +115481,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 94, - "total_frames": 22873, + "total_episodes": 48, + "total_frames": 6044, "fps": 30, - "total_tasks": 12, - "total_videos": 376, + "total_tasks": 6, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "1009.39 MB" + "dataset_size": "232.01 MB" }, - "frame_num": 22873, - "dataset_size": "1009.39 MB", - "data_structure": "Airbot_MMK2_move_cake_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(82 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 6044, + "dataset_size": "232.01 MB", + "data_structure": "Airbot_MMK2_take_cake_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:93" + "train": "0:47" }, "features": { "observation.images.cam_head_rgb": { @@ -106777,486 +115809,62 @@ ], "dtype": "int32" }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo 
Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "AIRBOT_MMK2_store_coffee_cups": { - "path": "AIRBOT_MMK2_store_coffee_cups", - "dataset_name": "store_coffee_cups", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Grasp the cup the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_cup", - "level1": "container", - "level2": "coffee_cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basin", - "level1": "container", - "level2": "basin", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-11166", - "dataset_size": "444.8MB", - "statistics": { - "total_episodes": 50, - "total_frames": 11166, - 
"total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "38759f00-3a88-419c-b74b-d8a66882a254", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the cup the right gripper", - "place the cup in the basin use the left gripper", - "End", - "Grasp the cup the left gripper", - "place the cup in the basin use the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji 
Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_store_coffee_cups_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"AIRBOT_MMK2_store_coffee_cups_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_hotel_services_e": { - "path": "leju_robot_hotel_services_e", - "dataset_name": "hotel_services_e", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Hand the room key to the person.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": 
"card", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-212885", - "dataset_size": "14.2GB", - "statistics": { - "total_episodes": 309, - "total_frames": 212885, - "total_tasks": 1, - "total_videos": 927, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "4d57f5ee-7850-4fb4-8532-415c1821c756", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hand the room key to the person.", - "Place the ID card on the card reader.", - "Take the ID card from the person's hand.", - "Pick up the room key from the key card box.", - "Hand the ID card to the person.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, 
Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_picks_up_battery_a": { - "path": "AgiBot-g1_picks_up_battery_a", - "dataset_name": "picks_up_battery_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Place the power supply on the operating table.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "carton", - "level1": "container", - "level2": "carton", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": 
"battery", - "level1": "tool", - "level2": "battery", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-214263", - "dataset_size": "119.7GB", - "statistics": { - "total_episodes": 562, - "total_frames": 214263, - "total_tasks": 1, - "total_videos": 4496, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "3385f559-eb6d-46ff-94d3-a6b48d250220", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the power supply on the operating table.", - "Grab and lift the power supply from the large box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie 
Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_picks_up_battery_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_picks_up_battery_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_mobile_accessory_storage_box_a": { - "path": "AgiBot-g1_mobile_accessory_storage_box_a", - "dataset_name": "mobile_accessory_storage_box_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Place the open mouse box onto the operation table.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "accessories", - "level1": 
"electronic_products", - "level2": "accessories", - "level3": null, - "level4": null, - "level5": null + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" } - ], - "operation_platform_height": null, - "frame_range": "0-40631", - "dataset_size": "16.7GB", - "statistics": { - "total_episodes": 81, - "total_frames": 40631, - "total_tasks": 1, - "total_videos": 648, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "811a660c-6371-490a-9141-3019afa0cac2", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the open mouse box onto the operation table.", - "Grab and pick up the open mouse box from the accessory packaging area.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, 
Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_mobile_accessory_storage_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_mobile_accessory_storage_box_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, 
Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Cobot_Magic_pushing_magnet": { - "path": "Cobot_Magic_pushing_magnet", - "dataset_name": "pushing_magnet", + "leju_robot_moving_parts_u": { + "path": "leju_robot_moving_parts_u", + "dataset_name": "moving_parts_u", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ - "push" + "grasp", + "pick", + "place" ], - "tasks": "End", + "tasks": "Place the black part on the table with right gripper", "objects": [ { "object_name": "table", @@ -107267,27 +115875,35 @@ 
"level5": null }, { - "object_name": "magnet", - "level1": "tool", - "level2": "magnet", + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-23553", - "dataset_size": "423.5MB", + "operation_platform_height": null, + "frame_range": "0-149345", + "dataset_size": "10.4GB", "statistics": { "total_episodes": 100, - "total_frames": 23553, + "total_frames": 149345, "total_tasks": 1, "total_videos": 300, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "ad07d99e-6bee-4147-b81a-8f2dfe943ed8", + "dataset_uuid": "5138770c-0dd3-4a50-a8a2-efa57aabc3e9", "language": [ "en", "zh" @@ -107296,8 +115912,14 @@ "robotics" ], "sub_tasks": [ + "Place the black part on the table with right gripper", + "Pick up the large material from the shelf.", + "Insert the large material into the corresponding slot on the workbench.", "End", - "Push the magnet on the right to connect the magnet on the left", + "Grasp the black part with right gripper", + "Move the large material to the workbench.", + "Move to the initial position of the shelf.", + "Move to the table behind body", "null" ], "annotations": { @@ -107335,10 +115957,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, 
Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_pushing_magnet_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_pushing_magnet_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n 
│ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Realman_RMC-AIDA-L_storage_peach_box": { + "Galaxea_R1_Lite_classify_object_three": { "task_categories": [ "robotics" ], @@ -107368,11 +115990,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Realman_RMC-AIDA-L_storage_peach_box", + "dataset_name": "Galaxea_R1_Lite_classify_object_three", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -107380,58 +116002,314 @@ 
"env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "black_box", - "level1": "home_storage", - "level2": "black_box", + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", + "object_name": "any_fruits", + "level1": "fruits", + "level2": "any_fruits", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_vegetables", + "level1": "vegetables", + "level2": "any_vegetables", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", "level3": null, "level4": null, "level5": null + }, + { + "object_name": "any_bread", + "level1": "foobreadd", + "level3": "any_bread", + "level2": null, + "level4": null, + "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the left gripper open the box, the right gripper grasp the peach on the table and place it into the box." + "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
], "sub_tasks": [ { - "subtask": "Close the box with the left gripper", + "subtask": "Grasp the rubiks cube and put it in the left basket", "subtask_index": 0 }, { - "subtask": "Open the lid with the left gripper", + "subtask": "Place the tape in the center of the table", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Grasp the soft cleanser and put it in the left basket", "subtask_index": 2 }, { - "subtask": "Grasp the peach with the right gripper", + "subtask": "Grasp the back scratcher and put it in the left basket", "subtask_index": 3 }, { - "subtask": "Place the peach into the box with the right gripper", + "subtask": "Grasp the apple and put it in the right basket", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the yellow marker and put it in the left basket", "subtask_index": 5 + }, + { + "subtask": "End", + "subtask_index": 6 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + "subtask_index": 7 + }, + { + "subtask": "Grasp the power strip and put it in the left basket", + "subtask_index": 8 + }, + { + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 9 + }, + { + "subtask": "Grasp the cleaning agent and put it in the left basket", + "subtask_index": 10 + }, + { + "subtask": "Grasp the blue marker pen and put it in the right basket", + "subtask_index": 11 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 12 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 13 + }, + { + "subtask": "Grasp the duck toys and put it in the left basket", + "subtask_index": 14 + }, + { + "subtask": "Grasp the blue marker pen and put it in the left basket", + "subtask_index": 15 + }, + { + "subtask": "Grasp the shampoo and put it in the left basket", + "subtask_index": 16 + }, + { + "subtask": "Grasp the triangle cake and put it in the right basket", + "subtask_index": 17 + }, + { + "subtask": 
"Grasp the brown plate and put it in the left basket", + "subtask_index": 18 + }, + { + "subtask": "Grasp the cookie and put it in the right basket", + "subtask_index": 19 + }, + { + "subtask": "Grasp the yellow cake and put it in the right basket", + "subtask_index": 20 + }, + { + "subtask": "Grasp the shower sphere and put it in the left basket", + "subtask_index": 21 + }, + { + "subtask": "Grasp the orange and put it in the right basket", + "subtask_index": 22 + }, + { + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 23 + }, + { + "subtask": "Grasp the round bread and put it in the right basket", + "subtask_index": 24 + }, + { + "subtask": "Grasp the lemon and put it in the right basket", + "subtask_index": 25 + }, + { + "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask_index": 26 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 27 + }, + { + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 28 + }, + { + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the black marker and put it in the left basket", + "subtask_index": 31 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 32 + }, + { + "subtask": "Grasp the black glass cup and put it in the left basket", + "subtask_index": 33 + }, + { + "subtask": "Grasp the blue marker and put it in the right basket", + "subtask_index": 34 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 35 + }, + { + "subtask": "Abnormal", + "subtask_index": 36 + }, + { + "subtask": "Grasp the peeler and put it in the left basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the brown towel and put it in the left basket", + "subtask_index": 38 + }, + 
{ + "subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the tea cup and put it in the left basket", + "subtask_index": 40 + }, + { + "subtask": "Grasp the brush and put it in the left basket", + "subtask_index": 41 + }, + { + "subtask": "Grasp the chocolate and put it in the right basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the grey towel and put it in the left basket", + "subtask_index": 43 + }, + { + "subtask": "Place the peach doll in the center of the table", + "subtask_index": 44 + }, + { + "subtask": "Grasp the rubiks cube and put it in the right basket", + "subtask_index": 45 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 46 + }, + { + "subtask": "Grasp the bread slice and put it in the right basket", + "subtask_index": 47 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 48 + }, + { + "subtask": "Grasp the soda water and put it in the left basket", + "subtask_index": 49 + }, + { + "subtask": "Grasp the peach doll and put it in the right basket", + "subtask_index": 50 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 51 + }, + { + "subtask": "Grasp the spoon and put it in the right basket", + "subtask_index": 52 + }, + { + "subtask": "Grasp the pen container and put it in the left basket", + "subtask_index": 53 + }, + { + "subtask": "Grasp the red duck and put it in the left basket", + "subtask_index": 54 + }, + { + "subtask": "Grasp the glasses case and put it in the right basket", + "subtask_index": 55 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 56 + }, + { + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 57 + }, + { + "subtask": "Grasp the potato chips and put it in the right basket", + "subtask_index": 58 + }, + { + "subtask": "Grasp the can and put it in the right 
basket", + "subtask_index": 59 + }, + { + "subtask": "Grasp the long bread and put it in the left basket", + "subtask_index": 60 + }, + { + "subtask": "Grasp the yellow duck and put it in the left basket", + "subtask_index": 61 + }, + { + "subtask": "Grasp the coke and put it in the right basket", + "subtask_index": 62 + }, + { + "subtask": "null", + "subtask_index": 63 } ], "atomic_actions": [ @@ -107440,25 +116318,27 @@ "place" ], "robot_name": [ - "Realman_RMC-AIDA-L" + "Galaxea_R1_Lite" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -107469,30 +116349,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 118, - "total_frames": 78472, + 
"total_episodes": 197, + "total_frames": 134891, "fps": 30, - "total_tasks": 6, - "total_videos": 354, + "total_tasks": 64, + "total_videos": 788, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 28, - "action_dim": 28, - "camera_views": 3, - "dataset_size": "751.68 MB" + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "7.32 GB" }, - "frame_num": 78472, - "dataset_size": "751.68 MB", - "data_structure": "Realman_RMC-AIDA-L_storage_peach_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(106 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 134891, + "dataset_size": "7.32 GB", + "data_structure": "Galaxea_R1_Lite_classify_object_three_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:117" + "train": "0:196" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -107501,8 +116381,31 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -107514,8 +116417,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -107524,8 +116427,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -107537,8 +116440,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -107547,8 +116450,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -107560,73 +116463,45 @@ "observation.state": { "dtype": "float32", "shape": [ - 28 + 14 ], 
"names": [ - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_arm_joint_7_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 28 + 14 ], "names": [ - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_arm_joint_7_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad", "left_arm_joint_1_rad", "left_arm_joint_2_rad", "left_arm_joint_3_rad", "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_arm_joint_7_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad" + "right_gripper_open" ] }, "timestamp": { @@ -107858,216 +116733,11 @@ "version_info": "Initial Release", "data_path": 
"data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "leju_robot_moving_parts_b": { - "path": "leju_robot_moving_parts_b", - "dataset_name": "moving_parts_b", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Insert the small parts into the corresponding holes on the workbench.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-708011", - "dataset_size": "43.3GB", - "statistics": { - "total_episodes": 490, - "total_frames": 708011, - "total_tasks": 1, - "total_videos": 1470, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "b23f0a82-6d8f-4f98-b818-5e10125816cc", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Insert the small parts into the corresponding holes on the workbench.", - "End", - "Place the white part on the table with left gripper", - "Grasp the white part with left gripper", - "Move the small parts to the workbench.", - "Move to the table behind body", - "Take small components from the rack.", - "Pick up small parts from the shelf.", - "Move to the initial position of the workbench.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": 
"auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": 
"leju_robot_moving_parts_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "G1edu-u3_place_plastic_bowl_ah": { - "path": "G1edu-u3_place_plastic_bowl_ah", - "dataset_name": "place_plastic_bowl_ah", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "pick", - "place" - ], - "tasks": "Place the plastic bowl on the table with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_bowl", - "level1": "container", - "level2": "plastic_bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-5638", - "dataset_size": "83.7MB", - "statistics": { - "total_episodes": 33, - "total_frames": 5638, - "total_tasks": 1, - "total_videos": 33, - "total_chunks": 1, - "chunks_size": 34, - "fps": 30 - }, - "dataset_uuid": "7153928d-6c50-4662-91a7-9d48d64e1e5d", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the plastic bowl on the table with right gripper", - "End", - "Place the plastic bowl on the table with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": 
"RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_place_plastic_bowl_ah_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ 
├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_place_plastic_bowl_ah_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "AgiBot-g1_battery_storage_b": { - "path": "AgiBot-g1_battery_storage_b", - "dataset_name": "battery_storage_b", + "RMC-AIDA-L_clean_table": { + "path": "RMC-AIDA-L_clean_table", + "dataset_name": "clean_table", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -108075,10 +116745,11 @@ "scene_type": [], "atomic_actions": [ "grasp", + "pick", "place", - "pick" + "wipe" ], - "tasks": "Place the power supply on the operating table.", + "tasks": "Place the rag on the table with the right gripper", "objects": [ { "object_name": "table", @@ -108089,252 +116760,51 @@ "level5": null }, { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": 
null, - "level5": null - }, - { - "object_name": "battery", - "level1": "electronic_products", - "level2": "battery", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-57968", - "dataset_size": "29.6GB", - "statistics": { - "total_episodes": 115, - "total_frames": 57968, - "total_tasks": 1, - "total_videos": 920, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "632c0e2b-a8bb-4e12-9635-82df87be2096", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the power supply on the operating table.", - "Grab and lift the power supply from the large box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, 
Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_battery_storage_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_battery_storage_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_throw_out_the_trash": { - "path": "R1_Lite_throw_out_the_trash", - "dataset_name": "throw_out_the_trash", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "End", - "objects": [ - { - "object_name": "trash_can", - "level1": "container", - "level2": "trash_can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "garbage_bag", + "object_name": "cup", "level1": "container", - "level2": "garbage_bag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "the_door", - "level1": "furniture", - "level2": "the_door", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 
null, - "frame_range": "0-143720", - "dataset_size": "6.0GB", - "statistics": { - "total_episodes": 109, - "total_frames": 143720, - "total_tasks": 1, - "total_videos": 327, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "1aceeebe-3221-4690-8d72-ca6f9ec512eb", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "End", - "Place the outer trash can on the ground", - "Put the trash bag at the door", - "Pick up the trash bag", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_throw_out_the_trash_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_throw_out_the_trash_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_the_box_stores_table_tennis_balls": { - "path": "Cobot_Magic_the_box_stores_table_tennis_balls", - "dataset_name": "the_box_stores_table_tennis_balls", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "use the right arm to clamp the table tennis ball closest to it", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "level2": "cup", "level3": null, "level4": null, "level5": null }, { - "object_name": "box", - "level1": "container", - "level2": "box", + "object_name": "blue_rag", + "level1": "clothing", + "level2": "blue_rag", "level3": null, "level4": null, "level5": null }, { - "object_name": "table_tennis", - "level1": "toy", - "level2": "table_tennis", + "object_name": "pruple_rag", + "level1": "clothing", + "level2": "pruple_rag", "level3": null, "level4": null, "level5": null }, { - "object_name": 
"tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "water", + "level1": "drink", + "level2": "water", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-438756", - "dataset_size": "19.9GB", + "operation_platform_height": 77.2, + "frame_range": "0-514178", + "dataset_size": "3.5GB", "statistics": { - "total_episodes": 477, - "total_frames": 438756, - "total_tasks": 5, - "total_videos": 1431, + "total_episodes": 776, + "total_frames": 514178, + "total_tasks": 4, + "total_videos": 2328, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "4af013a1-8691-4531-a11d-4a02b8ba9fdf", + "dataset_uuid": "ed026bcf-f342-4259-9fce-12208d2ca5b3", "language": [ "en", "zh" @@ -108343,10 +116813,18 @@ "robotics" ], "sub_tasks": [ - "use the right arm to clamp the table tennis ball closest to it", - "use the right arm to put the table tennis ball into the box", - "use the left arm to clamp the table tennis ball closest to it", - "use the left arm to put the table tennis ball into the box", + "Place the rag on the table with the right gripper", + "Stand the paper cup upright with the left gripper", + "Grasp the paper cup with the left gripper", + "Grasp the paper cup with the right gripper", + "Grasp the rag with the left gripper", + "Grasp the rag with the right gripper", + "Wipe the stains off the table with the left gripper", + "Place the rag on the table with the left gripper", + "Stand the paper cup upright with the right gripper", + "end", + "Wipe the stains off the table with the right gripper", + "abnormal", "null" ], "annotations": { @@ -108384,25 +116862,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, 
Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_the_box_stores_table_tennis_balls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_the_box_stores_table_tennis_balls_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_front_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "RMC-AIDA-L_clean_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_clean_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_take_and_put_away_items": { - "path": "R1_Lite_take_and_put_away_items", - "dataset_name": "take_and_put_away_items", + "leju_robot_hotel_services_b": { + "path": "leju_robot_hotel_services_b", + "dataset_name": "hotel_services_b", "robot_type": "", 
"end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", "pick", - "place", - "pull", - "push" + "place" ], - "tasks": "Take the mineral water out of the drawer and put it on the table", + "tasks": "Hand the room key to the person.", "objects": [ { "object_name": "table", @@ -108413,59 +116889,43 @@ "level5": null }, { - "object_name": "drawer", - "level1": "container", - "level2": "drawer", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "umbrella", - "level1": "daily_necessities", - "level2": "umbrella", + "object_name": "card", + "level1": "nfc", + "level2": "card", "level3": null, "level4": null, "level5": null }, { - "object_name": "data_cable", + "object_name": "sensor", "level1": "electronic_products", - "level2": "data_cable", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mineral_water", - "level1": "beverages", - "level2": "mineral_water", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "storage_box", - "level1": "container", - "level2": "storage_box", + "level2": "sensor", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-137069", - "dataset_size": "6.9GB", + "frame_range": "0-209703", + "dataset_size": "14.0GB", "statistics": { - "total_episodes": 112, - "total_frames": 137069, + "total_episodes": 288, + "total_frames": 209703, "total_tasks": 1, - "total_videos": 336, + "total_videos": 864, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "fa3f7148-5f74-444e-82a5-9f8b31a69987", + "dataset_uuid": "1652000d-29c5-40d4-9d8d-3198ed81edbb", "language": [ "en", "zh" @@ -108474,16 +116934,11 @@ "robotics" ], "sub_tasks": [ - "Take the mineral water out of the drawer and put it on the table", - "Open the drawer", - "Take the data cable out of the drawer and put it on the 
table", - "Take the umbrella out of the drawer and put it on the table", - "Take the storage box out of the drawer and put it on the table", - "Put the storage box in the drawer", - "Close the drawer", - "Put the umbrella in the drawer", - "Put the data cable in the drawer", - "Put the mineral water in the drawer", + "Hand the room key to the person.", + "Place the ID card on the card reader.", + "Take the ID card from the person's hand.", + "Pick up the room key from the key card box.", + "Hand the ID card to the person.", "null" ], "annotations": { @@ -108521,128 +116976,608 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_take_and_put_away_items_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_take_and_put_away_items_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n 
│ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_hotel_services_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_hotel_services_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── 
videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "RMC-AIDA-L_basket_storage_egg_yolk_pastry": { - "path": "RMC-AIDA-L_basket_storage_egg_yolk_pastry", - "dataset_name": "basket_storage_egg_yolk_pastry", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + "Agilex_Cobot_Magic_storage_object_closest_apple": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" + "language": [ + "en" ], - "tasks": "Place the basket in the center of view with left gripper", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_object_closest_apple", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", - "level1": "container", - "level2": "basket", + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "egg_yolk_pastry", + "object_name": "mango", "level1": "food", - "level2": "egg_yolk_pastry", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "apple", + "level1": "food", + "level2": "apple", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_erasers", + "level1": "stationery", + "level2": "whiteboard_erasers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-295062", 
- "dataset_size": "3.5GB", + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a picker to grab the item closest to the apple and place it in the basket." + ], + "sub_tasks": [ + { + "subtask": "Grasp the Rubik's Cube with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the chalkboard eraser with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the chalkboard eraser into the basket with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the shower sphere into the basket with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the mango into the basket with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the Rubik's Cube into the basket with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the shower sphere into the basket with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the mango with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Grasp the Rubik's Cube with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "End", + "subtask_index": 9 + }, + { + "subtask": "Place the Rubik's Cube into the basket with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the mango with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the mango into the basket with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the chalkboard eraser into the basket with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the chalkboard eraser with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "null", + "subtask_index": 17 + } + ], + "atomic_actions": [ 
+ "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], "statistics": { - "total_episodes": 727, - "total_frames": 295062, - "total_tasks": 4, - "total_videos": 2181, + "total_episodes": 49, + "total_frames": 13231, + "fps": 30, + "total_tasks": 18, + "total_videos": 147, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "189.68 MB" }, - "dataset_uuid": "928d5a84-eae7-4fae-b272-9356c151aa6d", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the basket in the center of view with left gripper", - "Place the egg yolk pastry in the basket with right gripper", - "Move the basket to the center of view with the right gripper", - "Move the basket to the center of view with the left gripper", - "Pick up the egg yolk pastry with the right gripper", - "Place the basket in the center of view with right gripper", - "Pick up the 
basket with the left gripper", - "Place the basket in the center of view with the left gripper", - "Grasp egg yolk pastry with left gripper", - "Place the egg yolk pastry into the basket with the right gripper", - "Grasp the basket with right gripper", - "End", - "Grasp egg yolk pastry with right gripper", - "Place the egg yolk pastry in the basket with left gripper", - "Grasp the basket with left gripper", - "Place the egg yolk pastry into the basket with the left gripper", - "Pick up the egg yolk pastry with the left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "frame_num": 13231, + "dataset_size": "189.68 MB", + "data_structure": "Agilex_Cobot_Magic_storage_object_closest_apple_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } }, "authors": { 
"contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": 
false, - "data_schema": "RMC-AIDA-L_basket_storage_egg_yolk_pastry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_basket_storage_egg_yolk_pastry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_close_doors": { + "Airbot_MMK2_push_away_book": { "task_categories": [ "robotics" ], @@ -108672,11 +117607,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_close_doors", + "dataset_name": "Airbot_MMK2_push_away_book", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "studroom", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -108684,9 +117619,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", + "object_name": "book", + "level1": "stationery", + "level2": "book", "level3": null, "level4": null, "level5": null @@ -108694,7 +117629,7 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "close the door by hand." + "pull out a book by hand." 
], "sub_tasks": [ { @@ -108702,28 +117637,26 @@ "subtask_index": 0 }, { - "subtask": "Touch the right cabinet door with the right gripper", + "subtask": "Abnormal", "subtask_index": 1 }, { - "subtask": "Close the door with the right gripper", + "subtask": "Lay the book down with the right gripper", "subtask_index": 2 }, { - "subtask": "Touch the left cabinet door with the left gripper", + "subtask": "Hold the book with the right gripper", "subtask_index": 3 }, - { - "subtask": "Close the door with the left gripper", - "subtask_index": 4 - }, { "subtask": "null", - "subtask_index": 5 + "subtask_index": 4 } ], "atomic_actions": [ - "push" + "pinch", + "clip", + "place" ], "robot_name": [ "Airbot_MMK2" @@ -108757,23 +117690,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 8141, + "total_episodes": 244, + "total_frames": 36575, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 5, + "total_videos": 976, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "280.57 MB" + "dataset_size": "1.40 GB" }, - "frame_num": 8141, - "dataset_size": "280.57 MB", - "data_structure": "Airbot_MMK2_close_doors_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 36575, + "dataset_size": "1.40 GB", + "data_structure": "Airbot_MMK2_push_away_book_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(232 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:243" }, "features": { "observation.images.cam_head_rgb": { @@ -109127,7 +118060,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_take_cake_both_hands": { + "Galaxea_R1_Lite_storage_object_dish": { "task_categories": [ "robotics" ], @@ -109141,53 +118074,245 @@ "license": "apache-2.0", "configs": [ { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_dish", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "dish", + "level1": "plates", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "kitchen_supplies", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke", + "level1": "beverages", + "level2": "coke", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "toys", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "kitchen_supplies", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "stationery", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, 
+ { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_take_cake_both_hands", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "chocolate_cake", - "level1": "snacks", - "level2": "chocolate_cake", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "storage_utensils", - "level2": "lid", + "object_name": "duck", + "level1": "doll", + "level2": "duck", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null @@ -109195,32 +118320,368 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the cake out of the basket with both hands and place it on the table." + "use a gripper to pick the target object and place on the dish." 
], "sub_tasks": [ { - "subtask": "Place the cake on the table with the right gripper", + "subtask": "Grasp the blue pot with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the cake on the table with the left gripper", + "subtask": "Place the back scratcher on the dish with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the cake from the white basket with the left gripper", + "subtask": "Grasp the plugboard with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the cake from the white basket with the right gripper", + "subtask": "Place the soft facial cleanser on the dish with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Grasp the potato chips with the right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the banana with the left gripper", "subtask_index": 5 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the round bread on the dish with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the compasses on the dish with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the duck toy on the dish with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the round wooden block on the dish with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the green lemon on the dish with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the back scratcher with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the square chewing gum with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Grasp the shower sphere with 
the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the peach on the dish with the left gripper", + "subtask_index": 18 + }, + { + "subtask": "Grasp the plugboard with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the tin with the left gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 21 + }, + { + "subtask": "Place the brown towel on the dish with the right gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the plugboard on the dish with the right gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the peach with the right gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the coke on the dish with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Place the banana on the dish with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the peach on the dish with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 30 + }, + { + "subtask": "Place the potato chips on the dish with the right gripper", + "subtask_index": 31 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the chocolate cake on the dish with the right gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the plugboard on the dish with the left gripper", + "subtask_index": 35 + }, + { + "subtask": "Grasp the bread slice with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Place the square chewing gum on the dish with the right gripper", + "subtask_index": 37 + }, + { + "subtask": 
"Grasp the duck toy with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "End", + "subtask_index": 39 + }, + { + "subtask": "Place the blackboard erasure on the dish with the left gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 41 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 42 + }, + { + "subtask": "Place the tape on the dish with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Place the bread slice on the dish with the right gripper", + "subtask_index": 44 + }, + { + "subtask": "Place the shower sphere on the dish with the right gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Place the banana on the dish with the right gripper", + "subtask_index": 48 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 49 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 50 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the square chewing gum on the dish with the left gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the blue cup on the dish with the right gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the hard facial cleanser on the dish with the right gripper", + "subtask_index": 54 + }, + { + "subtask": "Place the blue cup on the dish with the left gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 56 + }, + { + "subtask": "Place the duck toy on the dish with the left gripper", + "subtask_index": 57 + }, + { + "subtask": "Grasp the coke with the right gripper", + "subtask_index": 58 + }, + { + "subtask": 
"Place the square wooden block on the dish with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 60 + }, + { + "subtask": "Place the round bread on the dish with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the brown towel on the dish with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the bread slice on the dish with the left gripper", + "subtask_index": 64 + }, + { + "subtask": "Grasp the bread slice with the left gripper", + "subtask_index": 65 + }, + { + "subtask": "Grasp the yogurt with the left gripper", + "subtask_index": 66 + }, + { + "subtask": "Place the blue pot on the dish with the left gripper", + "subtask_index": 67 + }, + { + "subtask": "Grasp the blackboard erasure with the right gripper", + "subtask_index": 68 + }, + { + "subtask": "Place the coke on the dish with the left gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the square wooden block on the dish with the left gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the yogurt on the dish with the left gripper", + "subtask_index": 71 + }, + { + "subtask": "Place the chocolate cake on the dish with the left gripper", + "subtask_index": 72 + }, + { + "subtask": "Place the compasses on the dish with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Place the round wooden block on the dish with the left gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the chocolate on the dish with the left gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 76 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 77 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp 
the blue cup with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Grasp the soft facial cleanser with the right gripper", + "subtask_index": 80 + }, + { + "subtask": "Place the shower sphere on the dish with the left gripper", + "subtask_index": 81 + }, + { + "subtask": "Place the tin on the dish with the left gripper", + "subtask_index": 82 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 83 + }, + { + "subtask": "Place the hard facial cleanser on the dish with the left gripper", + "subtask_index": 84 + }, + { + "subtask": "Place the blackboard erasure on the dish with the right gripper", + "subtask_index": 85 + }, + { + "subtask": "Place the blue pot on the dish with the right gripper", + "subtask_index": 86 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 87 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 88 + }, + { + "subtask": "null", + "subtask_index": 89 } ], "atomic_actions": [ @@ -109229,21 +118690,21 @@ "place" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, 
resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -109260,30 +118721,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 6044, + "total_episodes": 101, + "total_frames": 26346, "fps": 30, - "total_tasks": 6, - "total_videos": 192, + "total_tasks": 90, + "total_videos": 404, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "232.01 MB" + "dataset_size": "993.15 MB" }, - "frame_num": 6044, - "dataset_size": "232.01 MB", - "data_structure": "Airbot_MMK2_take_cake_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 26346, + "dataset_size": "993.15 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_dish_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:47" + "train": "0:100" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -109292,8 +118753,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -109302,11 +118763,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -109315,8 +118776,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -109325,11 +118786,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -109338,8 +118799,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -109348,11 +118809,11 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -109361,8 +118822,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 
720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -109374,7 +118835,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -109389,36 +118850,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -109433,30 +118872,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -109607,6 
+119024,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -109628,118 +119105,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "leju_robot_moving_parts_u": { - "path": "leju_robot_moving_parts_u", - "dataset_name": "moving_parts_u", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the black part on the table with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", - "level3": null, - "level4": null, - "level5": null - } - ], - 
"operation_platform_height": null, - "frame_range": "0-149345", - "dataset_size": "10.4GB", - "statistics": { - "total_episodes": 100, - "total_frames": 149345, - "total_tasks": 1, - "total_videos": 300, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "5138770c-0dd3-4a50-a8a2-efa57aabc3e9", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the black part on the table with right gripper", - "Pick up the large material from the shelf.", - "Insert the large material into the corresponding slot on the workbench.", - "End", - "Grasp the black part with right gripper", - "Move the large material to the workbench.", - "Move to the initial position of the shelf.", - "Move to the table behind body", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_classify_object_three": { + "Agilex_Cobot_Magic_close_drawer_upper": { "task_categories": [ "robotics" ], @@ -109769,11 +119137,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_classify_object_three", + "dataset_name": "Agilex_Cobot_Magic_close_drawer_upper", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -109781,343 +119149,76 @@ "env_type": "Due to some reasons, this dataset 
temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "brown_basket", - "level1": "baskets", - "level2": "brown_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_basket", - "level1": "baskets", - "level2": "yellow_basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_fruits", - "level1": "fruits", - "level2": "any_fruits", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "any_vegetables", - "level1": "vegetables", - "level2": "any_vegetables", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "any_snacks", - "level1": "snacks", - "level2": "any_snacks", + "object_name": "three_layer_transparent_drawer", + "level1": "laboratory_supplies", + "level2": "three-layer_transparent_drawer", "level3": null, "level4": null, "level5": null - }, - { - "object_name": "any_bread", - "level1": "foobreadd", - "level3": "any_bread", - "level2": null, - "level4": null, - "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." + "close the upper drawer." 
], "sub_tasks": [ { - "subtask": "Grasp the rubiks cube and put it in the left basket", + "subtask": "Use the right gripper to touch the topmost layer of the storage cabinet", "subtask_index": 0 }, { - "subtask": "Place the tape in the center of the table", + "subtask": "Push the top drawer closed", "subtask_index": 1 }, { - "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask": "Use the right gripper to contact the middle shelf of the cabinet", "subtask_index": 2 }, { - "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "Grasp the apple and put it in the right basket", + "subtask": "Push the middle drawer closed", "subtask_index": 4 }, - { - "subtask": "Grasp the yellow marker and put it in the left basket", - "subtask_index": 5 - }, - { - "subtask": "End", - "subtask_index": 6 - }, - { - "subtask": "Grasp the white eraser and put it in the left basket", - "subtask_index": 7 - }, - { - "subtask": "Grasp the power strip and put it in the left basket", - "subtask_index": 8 - }, - { - "subtask": "Grasp the square chewing gum and put it in the right basket", - "subtask_index": 9 - }, - { - "subtask": "Grasp the cleaning agent and put it in the left basket", - "subtask_index": 10 - }, - { - "subtask": "Grasp the blue marker pen and put it in the right basket", - "subtask_index": 11 - }, - { - "subtask": "Grasp the soda water and put it in the right basket", - "subtask_index": 12 - }, - { - "subtask": "Grasp the spoon and put it in the left basket", - "subtask_index": 13 - }, - { - "subtask": "Grasp the duck toys and put it in the left basket", - "subtask_index": 14 - }, - { - "subtask": "Grasp the blue marker pen and put it in the left basket", - "subtask_index": 15 - }, - { - "subtask": "Grasp the shampoo and put it in the left basket", - "subtask_index": 16 - }, - { - "subtask": "Grasp the triangle cake and put it in the right basket", - "subtask_index": 17 - }, - { - 
"subtask": "Grasp the brown plate and put it in the left basket", - "subtask_index": 18 - }, - { - "subtask": "Grasp the cookie and put it in the right basket", - "subtask_index": 19 - }, - { - "subtask": "Grasp the yellow cake and put it in the right basket", - "subtask_index": 20 - }, - { - "subtask": "Grasp the shower sphere and put it in the left basket", - "subtask_index": 21 - }, - { - "subtask": "Grasp the orange and put it in the right basket", - "subtask_index": 22 - }, - { - "subtask": "Grasp the compass and put it in the left basket", - "subtask_index": 23 - }, - { - "subtask": "Grasp the round bread and put it in the right basket", - "subtask_index": 24 - }, - { - "subtask": "Grasp the lemon and put it in the right basket", - "subtask_index": 25 - }, - { - "subtask": "Grasp the egg yolk pastry and put it in the right basket", - "subtask_index": 26 - }, - { - "subtask": "Grasp the soap and put it in the left basket", - "subtask_index": 27 - }, - { - "subtask": "Grasp the washing liquid and put it in the left basket", - "subtask_index": 28 - }, - { - "subtask": "Grasp the hard cleanser and put it in the left basket", - "subtask_index": 29 - }, - { - "subtask": "Grasp the milk and put it in the right basket", - "subtask_index": 30 - }, - { - "subtask": "Grasp the black marker and put it in the left basket", - "subtask_index": 31 - }, - { - "subtask": "Grasp the banana and put it in the right basket", - "subtask_index": 32 - }, - { - "subtask": "Grasp the black glass cup and put it in the left basket", - "subtask_index": 33 - }, - { - "subtask": "Grasp the blue marker and put it in the right basket", - "subtask_index": 34 - }, - { - "subtask": "Grasp the bath ball and put it in the left basket", - "subtask_index": 35 - }, - { - "subtask": "Abnormal", - "subtask_index": 36 - }, - { - "subtask": "Grasp the peeler and put it in the left basket", - "subtask_index": 37 - }, - { - "subtask": "Grasp the brown towel and put it in the left basket", - 
"subtask_index": 38 - }, - { - "subtask": "Grasp the peach and put it in the right basket", - "subtask_index": 39 - }, - { - "subtask": "Grasp the tea cup and put it in the left basket", - "subtask_index": 40 - }, - { - "subtask": "Grasp the brush and put it in the left basket", - "subtask_index": 41 - }, - { - "subtask": "Grasp the chocolate and put it in the right basket", - "subtask_index": 42 - }, - { - "subtask": "Grasp the grey towel and put it in the left basket", - "subtask_index": 43 - }, - { - "subtask": "Place the peach doll in the center of the table", - "subtask_index": 44 - }, - { - "subtask": "Grasp the rubiks cube and put it in the right basket", - "subtask_index": 45 - }, - { - "subtask": "Grasp the tape and put it in the left basket", - "subtask_index": 46 - }, - { - "subtask": "Grasp the bread slice and put it in the right basket", - "subtask_index": 47 - }, - { - "subtask": "Grasp the glasses case and put it in the left basket", - "subtask_index": 48 - }, - { - "subtask": "Grasp the soda water and put it in the left basket", - "subtask_index": 49 - }, - { - "subtask": "Grasp the peach doll and put it in the right basket", - "subtask_index": 50 - }, - { - "subtask": "Grasp the blue cup and put it in the left basket", - "subtask_index": 51 - }, - { - "subtask": "Grasp the spoon and put it in the right basket", - "subtask_index": 52 - }, - { - "subtask": "Grasp the pen container and put it in the left basket", - "subtask_index": 53 - }, - { - "subtask": "Grasp the red duck and put it in the left basket", - "subtask_index": 54 - }, - { - "subtask": "Grasp the glasses case and put it in the right basket", - "subtask_index": 55 - }, - { - "subtask": "Grasp the long bread and put it in the right basket", - "subtask_index": 56 - }, - { - "subtask": "Grasp the yogurt and put it in the right basket", - "subtask_index": 57 - }, - { - "subtask": "Grasp the potato chips and put it in the right basket", - "subtask_index": 58 - }, - { - "subtask": "Grasp the 
can and put it in the right basket", - "subtask_index": 59 - }, - { - "subtask": "Grasp the long bread and put it in the left basket", - "subtask_index": 60 - }, - { - "subtask": "Grasp the yellow duck and put it in the left basket", - "subtask_index": 61 - }, - { - "subtask": "Grasp the coke and put it in the right basket", - "subtask_index": 62 - }, { "subtask": "null", - "subtask_index": 63 + "subtask_index": 5 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "push" ], "robot_name": [ - "Galaxea_R1_Lite" + "Agilex_Cobot_Magic" ], "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", "cam_right_wrist_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -110128,53 +119229,30 @@ "subtask_annotations.jsonl" ], "statistics": { - 
"total_episodes": 197, - "total_frames": 134891, + "total_episodes": 173, + "total_frames": 49994, "fps": 30, - "total_tasks": 64, - "total_videos": 788, + "total_tasks": 6, + "total_videos": 519, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "7.32 GB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "522.40 MB" }, - "frame_num": 134891, - "dataset_size": "7.32 GB", - "data_structure": "Galaxea_R1_Lite_classify_object_three_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 49994, + "dataset_size": "522.40 MB", + "data_structure": "Agilex_Cobot_Magic_close_drawer_upper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(161 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:196" + "train": "0:172" }, "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -110183,8 +119261,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -110196,8 +119274,8 @@ "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -110206,8 +119284,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -110219,8 +119297,8 @@ "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -110229,8 +119307,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -110242,7 +119320,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -110251,20 
+119329,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -110273,14 +119363,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -110432,26 +119534,6 @@ ], "dtype": "int32" }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, "gripper_mode_state": { "names": [ "left_gripper_mode", @@ -110491,6 +119573,26 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + 
], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -110512,253 +119614,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" - }, - "RMC-AIDA-L_clean_table": { - "path": "RMC-AIDA-L_clean_table", - "dataset_name": "clean_table", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place", - "wipe" - ], - "tasks": "Place the rag on the table with the right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cup", - "level1": "container", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_rag", - "level1": "clothing", - "level2": "blue_rag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pruple_rag", - "level1": "clothing", - "level2": "pruple_rag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "water", - "level1": "drink", - "level2": "water", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-514178", - "dataset_size": "3.5GB", - "statistics": { - "total_episodes": 776, - "total_frames": 514178, - "total_tasks": 4, - "total_videos": 2328, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "ed026bcf-f342-4259-9fce-12208d2ca5b3", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the rag on the table with 
the right gripper", - "Stand the paper cup upright with the left gripper", - "Grasp the paper cup with the left gripper", - "Grasp the paper cup with the right gripper", - "Grasp the rag with the left gripper", - "Grasp the rag with the right gripper", - "Wipe the stains off the table with the left gripper", - "Place the rag on the table with the left gripper", - "Stand the paper cup upright with the right gripper", - "end", - "Wipe the stains off the table with the right gripper", - "abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei 
Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "RMC-AIDA-L_clean_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_clean_table_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "leju_robot_hotel_services_b": { - "path": "leju_robot_hotel_services_b", - "dataset_name": "hotel_services_b", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Hand the room key to the person.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "card", - "level1": "nfc", - "level2": "card", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "sensor", - "level1": "electronic_products", - "level2": "sensor", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-209703", - "dataset_size": "14.0GB", - "statistics": { - "total_episodes": 288, - "total_frames": 
209703, - "total_tasks": 1, - "total_videos": 864, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "1652000d-29c5-40d4-9d8d-3198ed81edbb", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hand the room key to the person.", - "Place the ID card on the card reader.", - "Take the ID card from the person's hand.", - "Pick up the room key from the key card box.", - "Hand the ID card to the person.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing 
Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "leju_robot_hotel_services_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_hotel_services_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_storage_object_closest_apple": { + "Agilex_Cobot_Magic_storage_peach_right": { "task_categories": [ "robotics" ], @@ -110788,11 +119646,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_object_closest_apple", + "dataset_name": "Agilex_Cobot_Magic_storage_peach_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -110816,41 +119674,9 @@ "level5": null }, { - "object_name": "mango", - "level1": "food", - "level2": "mango", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "apple", + "object_name": "peach", "level1": "food", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubik's_cube", - "level1": "toys", 
- "level2": "rubik's_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_erasers", - "level1": "stationery", - "level2": "whiteboard_erasers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", + "level2": "peach", "level3": null, "level4": null, "level5": null @@ -110858,80 +119684,28 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "use a picker to grab the item closest to the apple and place it in the basket." + "put the peach in the basket with right arm." ], "sub_tasks": [ { - "subtask": "Grasp the Rubik's Cube with the left gripper", + "subtask": "Grasp the peach with right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the chalkboard eraser with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Place the chalkboard eraser into the basket with the right gripper", + "subtask": "Place the peach in the basket with right gripper", "subtask_index": 2 }, { - "subtask": "Place the shower sphere into the basket with the left gripper", + "subtask": "Abnormal", "subtask_index": 3 }, - { - "subtask": "Place the mango into the basket with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Place the Rubik's Cube into the basket with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Place the shower sphere into the basket with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Grasp the mango with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Grasp the Rubik's Cube with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "End", - "subtask_index": 9 - }, - { - "subtask": "Place the Rubik's Cube into the basket with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the mango with the left 
gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the mango into the basket with the left gripper", - "subtask_index": 12 - }, - { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "Place the chalkboard eraser into the basket with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 15 - }, - { - "subtask": "Grasp the chalkboard eraser with the left gripper", - "subtask_index": 16 - }, { "subtask": "null", - "subtask_index": 17 + "subtask_index": 4 } ], "atomic_actions": [ @@ -110969,23 +119743,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 13231, + "total_episodes": 99, + "total_frames": 25876, "fps": 30, - "total_tasks": 18, - "total_videos": 147, + "total_tasks": 5, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "189.68 MB" + "dataset_size": "277.18 MB" }, - "frame_num": 13231, - "dataset_size": "189.68 MB", - "data_structure": "Agilex_Cobot_Magic_storage_object_closest_apple_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 25876, + "dataset_size": "277.18 MB", + "data_structure": "Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:48" + "train": "0:98" }, "features": { "observation.images.cam_head_rgb": { @@ -111306,33 +120080,536 @@ }, "gripper_activity_action": { "names": [ - "left_gripper_activity", - "right_gripper_activity" + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao 
Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_bell_pepper": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_bell_pepper", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "scene_level1", + "level2": "scene_level2", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "red_bell_pepper", + "level1": "vegetables", + "level2": "red_bell_pepper", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_bell_pepper", + "level1": "vegetables", + "level2": "yellow_bell_pepper", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_basket", + "level1": "basket", + "level2": "white_basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the red peppers and yellow peppers into the box." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the yellow pepper into the left compartment of the storage box with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Grasp the green pepper with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the green pepper into the right compartment of the storage box with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the yellow pepper with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 48, + "total_frames": 7525, + "fps": 30, + "total_tasks": 6, + "total_videos": 192, + "total_chunks": 1, + "chunks_size": 
1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "200.59 MB" + }, + "frame_num": 7525, + "dataset_size": "200.59 MB", + "data_structure": "Airbot_MMK2_storage_bell_pepper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:47" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_open_scale_state": { + "eef_acc_mag_state": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_eef_acc_mag", + "right_eef_acc_mag" ], - "dtype": "float32", "shape": [ 2 - ] + ], + "dtype": "int32" }, - "gripper_open_scale_action": { + "eef_acc_mag_action": { "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" + "left_eef_acc_mag", + "right_eef_acc_mag" ], - "dtype": "float32", "shape": [ 2 - ] + ], + "dtype": "int32" } }, "authors": { @@ -111354,9 +120631,127 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_push_away_book": { + "Cobot_Magic_pour_water_bottle": { + "path": "Cobot_Magic_pour_water_bottle", + "dataset_name": "pour_water_bottle", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "pour", + "place" + ], + "tasks": "End", + "objects": [ + { + "object_name": "table", + "level1": 
"furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bottle", + "level1": "container", + "level2": "bottle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water", + "level1": "drink", + "level2": "water", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-43905", + "dataset_size": "1.1GB", + "statistics": { + "total_episodes": 92, + "total_frames": 43905, + "total_tasks": 1, + "total_videos": 276, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "e375b064-04ad-4cb8-a8d4-d0994c11fa75", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Grasp the mineral water bottle", + "Pour water into the cup", + "Anomaly", + "Abnormal", + "Pour water into the teacup", + "Place the water bottle", + "Pick up the water bottle", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced 
Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_pour_water_bottle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_pour_water_bottle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_storage_pineapple": { "task_categories": [ "robotics" ], @@ -111386,11 +120781,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_push_away_book", + "dataset_name": "Airbot_MMK2_storage_pineapple", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "study_room", + "level2": "kitchen", "level3": null, "level4": null, 
"level5": null @@ -111398,9 +120793,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "book", - "level1": "stationery", - "level2": "book", + "object_name": "pineapple", + "level1": "fruit", + "level2": "pineapple", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -111408,33 +120811,29 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pull out a book by hand." + "pick up the pineapple with right hand and put it into the storage box." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Place the pineapple into the storage box with the right gripper", "subtask_index": 0 }, { - "subtask": "Abnormal", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Lay the book down with the right gripper", + "subtask": "Grasp the pineapple with the right gripper", "subtask_index": 2 }, - { - "subtask": "Hold the book with the right gripper", - "subtask_index": 3 - }, { "subtask": "null", - "subtask_index": 4 + "subtask_index": 3 } ], "atomic_actions": [ - "pinch", - "clip", + "grasp", + "pick", "place" ], "robot_name": [ @@ -111469,23 +120868,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 244, - "total_frames": 36575, + "total_episodes": 49, + "total_frames": 5219, "fps": 30, - "total_tasks": 5, - "total_videos": 976, + "total_tasks": 4, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "1.40 GB" + "dataset_size": "140.76 MB" }, - "frame_num": 36575, - "dataset_size": "1.40 GB", - "data_structure": "Airbot_MMK2_push_away_book_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- 
eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (232 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 5219, + "dataset_size": "140.76 MB", + "data_structure": "Airbot_MMK2_storage_pineapple_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:243" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -111839,1054 +121238,1471 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_storage_object_dish": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" + "Cobot_Magic_desktop_organization": { + "path": "Cobot_Magic_desktop_organization", + "dataset_name": "desktop_organization", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" ], - "tags": [ - "RoboCOIN", - "LeRobot" + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" ], - "license": "apache-2.0", - "configs": [ + "tasks": "Hand over the waste paper", + "objects": [ { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_storage_object_dish", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "household", - "level2": "living_room", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "dish", - "level1": "plates", - "level2": "dish", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { "object_name": "banana", - "level1": "fruits", + "level1": "fruit", "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", + "object_name": "waste_paper", + "level1": "garbage", + "level2": "waste_paper", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_cup", - "level1": "cups", - "level2": "blue_cup", + "object_name": "basket", + "level1": "container", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "blue_pot", - "level1": "kitchen_supplies", - "level2": "blue_pot", + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", "level3": null, "level4": null, "level5": null }, { - "object_name": "toast_slices", - "level1": "bread", - "level2": "toast_slices", + "object_name": "knife", + 
"level1": "office_supplies", + "level2": "knife", "level3": null, "level4": null, "level5": null }, { - "object_name": "brown_towel", - "level1": "towels", - "level2": "brown_towel", + "object_name": "ruler", + "level1": "office_supplies", + "level2": "ruler", "level3": null, "level4": null, "level5": null }, { - "object_name": "can", - "level1": "snacks", - "level2": "can", + "object_name": "bread", + "level1": "food", + "level2": "bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "coke", - "level1": "beverages", - "level2": "coke", + "object_name": "small_pieces_of_paper", + "level1": "garbage", + "level2": "small_pieces_of_paper", "level3": null, "level4": null, "level5": null }, { - "object_name": "potato_chips", - "level1": "snacks", - "level2": "potato_chips", + "object_name": "coffee", + "level1": "drink", + "level2": "coffee", "level3": null, "level4": null, "level5": null }, { - "object_name": "chocolate", - "level1": "snacks", - "level2": "chocolate", + "object_name": "pencil_sharpener", + "level1": "office_supplies", + "level2": "pencil_sharpener", "level3": null, "level4": null, "level5": null }, { - "object_name": "compass", - "level1": "stationery", - "level2": "compass", + "object_name": "grape", + "level1": "fruit", + "level2": "grape", "level3": null, "level4": null, "level5": null }, { - "object_name": "block_pillar", - "level1": "toys", - "level2": "block_pillar", + "object_name": "plastic_sheet", + "level1": "garbage", + "level2": "plastic_sheet", "level3": null, "level4": null, "level5": null }, { - "object_name": "egg_beater", - "level1": "kitchen_supplies", - "level2": "egg_beater", + "object_name": "teacup", + "level1": "container", + "level2": "teacup", "level3": null, "level4": null, "level5": null }, { - "object_name": "eraser", - "level1": "stationery", - "level2": "eraser", + "object_name": "colored_glue", + "level1": "office_supplies", + "level2": "colored_glue", "level3": null, "level4": null, "level5": null 
}, { - "object_name": "chewing_gum", - "level1": "snacks", - "level2": "chewing_gum", + "object_name": "scissor", + "level1": "office_supplies", + "level2": "scissor", "level3": null, "level4": null, "level5": null }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", + "object_name": "mango", + "level1": "fruit", + "level2": "mango", "level3": null, "level4": null, "level5": null }, { - "object_name": "green_lemon", - "level1": "fruits", - "level2": "green_lemon", + "object_name": "fruit_tray", + "level1": "container", + "level2": "fruit_tray", "level3": null, "level4": null, "level5": null }, { - "object_name": "peach", - "level1": "fruits", - "level2": "peach", + "object_name": "paper_ball", + "level1": "garbage", + "level2": "paper_ball", "level3": null, "level4": null, "level5": null }, { - "object_name": "power_strip", - "level1": "electrical_control_equipment", - "level2": "power_strip", + "object_name": "garbage_bin", + "level1": "container", + "level2": "garbage_bin", "level3": null, "level4": null, "level5": null }, { - "object_name": "round_bread", - "level1": "bread", - "level2": "round_bread", + "object_name": "water_bottle", + "level1": "container", + "level2": "water_bottle", "level3": null, "level4": null, "level5": null }, { - "object_name": "mentholatum_facial_cleanser", - "level1": "daily_chemical_products", - "level2": "mentholatum_facial_cleanser", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": "square_building_blocks", + "object_name": "pen_holder", + "level1": "garbage", + "level2": "spitball", "level3": null, "level4": null, "level5": null }, { - "object_name": "tape", - "level1": "stationery", - "level2": "tape", + "object_name": "cola", + "level1": "drink", + "level2": "cola", "level3": null, "level4": null, 
"level5": null }, { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "milk", + "level1": "drink", + "level2": "milk", "level3": null, "level4": null, "level5": null - }, + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-2026628", + "dataset_size": "32.3GB", + "statistics": { + "total_episodes": 1070, + "total_frames": 2026628, + "total_tasks": 6, + "total_videos": 3210, + "total_chunks": 2, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "32b7ba5a-b7c4-475c-8bcc-6b60402acd24", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Hand over the waste paper", + "Place the red pencil sharpener into the pen holder", + "place the silver glue into the pen holder", + "Place the cola can in the basket with left gripper", + "Place the waste paper in the basket with left gripper", + "Place the eraser in pen holder with left gripper", + "Grasp the waste paper with right gripper", + "Place the pear in the plate", + "Static", + "Hand over the plastic", + "Grasp the bottle with left gripper", + "pick up the ruler", + "Place the black pen in pen holder with left gripper", + "Grasp the banana with left gripper", + "Pick up the banana", + "Hand over the banana", + "Pick up the pen with right arm", + "Place the cola can in the basket with right gripper", + "Hand over the grapes", + "Place the basket on the table with right gripper", + "Pick up the red pencil sharpener", + "Stand the bottle with left gripper", + "Pick up the waste paper", + "Grasp the blue pen with left gripper", + "pick up the yellow knife", + "Place the eraser into the pen holder", + "Place the black pen in pen holder with right gripper", + "Place the gray pen in pen holder with right gripper", + "Place the gray pen in pen holder with left gripper", + "Place the milk in the basket with right gripper", + "pick up the red glue", + "Hand over the yellow glue", + "Hand over the bottle", + "Place the plastic in the 
basket", + "Pick up the grapes", + "pick up the pink scissors", + "pick up the silver glue", + "Abnormal", + "Place the blue pen in pen holder with right gripper", + "Place the blue pen in pen holder with left gripper", + "Place the yellow pen in pen holder with right gripper", + "Grasp the cola can with right gripper", + "Grasp the mango with left gripper", + "Grasp the black pen with left gripper", + "Grasp the blue pen with right gripper", + "Grasp the eraser with right gripper", + "Pick up the plastic", + "Place the waste paper in the basket with right gripper", + "Grasp the cola can with left gripper", + "Place the cola can on the table with left gripper", + "Pick up the eraser", + "Grasp the waste paper with left gripper", + "Grasp the yellow pen with right gripper", + "Place the pink scissors into the pen holder", + "Lift the basket with left gripper", + "Pick up the bottle with the right arm", + "Hand over the bread", + "Place the grapes in the plate", + "transfer the pen to the left arm", + "transfer the bottle to the left arm", + "Hand over the red glue", + "End", + "Hand over the yellow knife", + "Hand over the ruler", + "Place the blue scissors into the pen holder", + "Place the yellow pen in pen holder with left gripper", + "Hand over the blue knife", + "Place the pen into the pen holder", + "Pick up the pear", + "transfer the pen to the right arm", + "Place the blue knife into the pen holder", + "Place the mango in the plate with left gripper", + "pick up the yellow glue", + "Hand over the pear", + "Place the blue pencil sharpener into the pen holder", + "Grasp the black pen with right gripper", + "Hand over the pink scissors", + "Pick up the bottle with the left arm", + "Grasp the gray pen with right gripper", + "Grasp the eraser with left gripper", + "Place the eraser in pen holder with right gripper", + "Grasp the basket with left gripper", + "Pick up the pen with left arm", + "Pick up the blue pencil sharpener", + "pick up the bread", + "Hand over 
the pen", + "Place the milk in the basket with left gripper", + "place the red glue into the pen holder", + "Place the banana in the plate with left gripper", + "Hand the basket to the right gripper with the left gripper", + "place the bread in the plate", + "Grasp the yellow pen with left gripper", + "Hand over the blue scissors", + "place the yellow glue into the pen holder", + "pick up the blue knife", + "Place the cola can on the table with right gripper", + "Place the bottle", + "Grasp the gray pen with left gripper", + "Place the ruler into the pen holder", + "pick up the blue scissors", + "Grasp the milk with right gripper", + "Place the banana in the plate", + "Grasp the milk with left gripper", + "Place the basket on the table with left gripper", + "Hand over the silver glue", + "Place the waste paper in the basket", + "place the yellow knife into the pen holder", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue 
Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Cobot_Magic_desktop_organization_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── 
episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", + "structure": "Cobot_Magic_desktop_organization_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── 
episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + }, + "AgiBot-g1_box_storage_a": { + "path": "AgiBot-g1_box_storage_a", + "dataset_name": "box_storage_a", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Place the mouse and the power cord paper box into the container.", + "objects": [ { - "object_name": "duck", - "level1": "doll", - "level2": "duck", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "ambrosial_yogurt", - "level1": "beverages", - "level2": "ambrosial_yogurt", + "object_name": "box", + "level1": "container", + "level2": "box", "level3": null, "level4": null, "level5": null } ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use a gripper to pick the target object and place on the dish." 
+ "operation_platform_height": null, + "frame_range": "0-11770", + "dataset_size": "4.8GB", + "statistics": { + "total_episodes": 21, + "total_frames": 11770, + "total_tasks": 1, + "total_videos": 168, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "a1e4a24d-cfb7-4719-a86c-d455555bc395", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" ], "sub_tasks": [ + "Place the mouse and the power cord paper box into the container.", + "Pick up the mouse and the power cord paper box.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, 
Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_box_storage_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "alpha_bot_2_press_the_button_b": { + "path": "alpha_bot_2_press_the_button_b", + "dataset_name": "press_the_button_b", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "pressbutton", + "push" + ], + "tasks": "Touch the bottle with left gripper", + "objects": [ { - "subtask": "Grasp the blue pot with the left gripper", - "subtask_index": 0 - }, - { - "subtask": "Place the back scratcher on the dish with the right gripper", - "subtask_index": 1 - }, - { - "subtask": "Grasp the plugboard with the left gripper", - "subtask_index": 2 - }, - { - "subtask": "Place the soft facial cleanser on the dish with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Grasp the potato chips with the right gripper", - "subtask_index": 4 - }, - { - "subtask": "Grasp the banana with the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Grasp the compasses with the right gripper", - 
"subtask_index": 6 - }, - { - "subtask": "Grasp the duck toy with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "Place the round bread on the dish with the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the blue cup with the left gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the compasses on the dish with the right gripper", - "subtask_index": 10 - }, - { - "subtask": "Place the duck toy on the dish with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Place the round wooden block on the dish with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Place the green lemon on the dish with the right gripper", - "subtask_index": 13 - }, - { - "subtask": "Grasp the back scratcher with the right gripper", - "subtask_index": 14 - }, - { - "subtask": "Grasp the square chewing gum with the left gripper", - "subtask_index": 15 - }, - { - "subtask": "Grasp the chocolate cake with the right gripper", - "subtask_index": 16 - }, - { - "subtask": "Grasp the shower sphere with the left gripper", - "subtask_index": 17 - }, - { - "subtask": "Place the peach on the dish with the left gripper", - "subtask_index": 18 - }, - { - "subtask": "Grasp the plugboard with the right gripper", - "subtask_index": 19 - }, - { - "subtask": "Grasp the tin with the left gripper", - "subtask_index": 20 - }, - { - "subtask": "Grasp the brown towel with the left gripper", - "subtask_index": 21 - }, - { - "subtask": "Place the brown towel on the dish with the right gripper", - "subtask_index": 22 - }, - { - "subtask": "Grasp the hard facial cleanser with the left gripper", - "subtask_index": 23 - }, - { - "subtask": "Place the plugboard on the dish with the right gripper", - "subtask_index": 24 - }, - { - "subtask": "Grasp the peach with the right gripper", - "subtask_index": 25 - }, - { - "subtask": "Grasp the brown towel with the right gripper", - "subtask_index": 26 - }, - { - "subtask": "Place the coke on the dish with the 
right gripper", - "subtask_index": 27 - }, - { - "subtask": "Place the banana on the dish with the left gripper", - "subtask_index": 28 - }, - { - "subtask": "Place the peach on the dish with the right gripper", - "subtask_index": 29 - }, - { - "subtask": "Grasp the hard facial cleanser with the right gripper", - "subtask_index": 30 - }, - { - "subtask": "Place the potato chips on the dish with the right gripper", - "subtask_index": 31 - }, - { - "subtask": "Grasp the peach with the left gripper", - "subtask_index": 32 - }, - { - "subtask": "Grasp the green lemon with the right gripper", - "subtask_index": 33 - }, - { - "subtask": "Place the chocolate cake on the dish with the right gripper", - "subtask_index": 34 - }, - { - "subtask": "Place the plugboard on the dish with the left gripper", - "subtask_index": 35 - }, - { - "subtask": "Grasp the bread slice with the right gripper", - "subtask_index": 36 - }, - { - "subtask": "Place the square chewing gum on the dish with the right gripper", - "subtask_index": 37 - }, - { - "subtask": "Grasp the duck toy with the right gripper", - "subtask_index": 38 + "object_name": "mineral_water", + "level1": "drinks", + "level2": "mineral_water", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "End", - "subtask_index": 39 + "object_name": "button", + "level1": "toy", + "level2": "button", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blackboard erasure on the dish with the left gripper", - "subtask_index": 40 - }, + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-50921", + "dataset_size": "943.7MB", + "statistics": { + "total_episodes": 65, + "total_frames": 50921, + "total_tasks": 1, + "total_videos": 260, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "627d6f91-a0ba-46b7-96dd-616242c3a6af", + 
"language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Touch the bottle with left gripper", + "End", + "Move the bottle away with right gripper", + "Abnormal", + "Press the button with right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, 
Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AgiBot-g1_box_storage_tool": { + "path": "AgiBot-g1_box_storage_tool", + "dataset_name": "box_storage_tool", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "End", + "objects": [ { - "subtask": "Grasp the blackboard erasure with the left gripper", - "subtask_index": 41 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the coke with the left gripper", - "subtask_index": 42 + "object_name": "box", + "level1": "home_storage", + "level2": "box", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the tape on the dish with the right gripper", - "subtask_index": 43 - }, + "object_name": "tool", + "level1": "tool", + "level2": 
"tool", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-32860", + "dataset_size": "1.2GB", + "statistics": { + "total_episodes": 99, + "total_frames": 32860, + "total_tasks": 1, + "total_videos": 297, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4ed6d684-19e6-4881-9b9f-eabbb1a6789d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "End", + "Abnormal", + "Grasp the data cable", + "Place the data cable in the another box", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, 
Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_box_storage_tool_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_tool_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_move_the_position_of_the_duck": { + "path": "R1_Lite_move_the_position_of_the_duck", + "dataset_name": "move_the_position_of_the_duck", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp" + ], + "tasks": "Grasp the red duck with right gripper", + "objects": [ { - "subtask": "Place the bread slice on the dish with the right gripper", - "subtask_index": 44 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the shower sphere on the dish with the right gripper", - "subtask_index": 45 + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the round wooden block with the left gripper", - "subtask_index": 46 + 
"object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the compasses with the left gripper", - "subtask_index": 47 + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the banana on the dish with the right gripper", - "subtask_index": 48 + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the blue pot with the right gripper", - "subtask_index": 49 + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the round bread with the right gripper", - "subtask_index": 50 + "object_name": "can", + "level1": "container", + "level2": "can", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the chocolate cake with the left gripper", - "subtask_index": 51 + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the square chewing gum on the dish with the left gripper", - "subtask_index": 52 + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blue cup on the dish with the right gripper", - "subtask_index": 53 + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the hard facial cleanser on the dish with the right gripper", - "subtask_index": 54 + "object_name": "peeler", + "level1": "tool", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blue cup on the dish with the left gripper", - 
"subtask_index": 55 + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the tape with the right gripper", - "subtask_index": 56 + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the duck toy on the dish with the left gripper", - "subtask_index": 57 + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the coke with the right gripper", - "subtask_index": 58 + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the square wooden block on the dish with the right gripper", - "subtask_index": 59 + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the square chewing gum with the right gripper", - "subtask_index": 60 + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the round bread on the dish with the right gripper", - "subtask_index": 61 + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the shower sphere with the right gripper", - "subtask_index": 62 + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the brown towel on the dish with the left gripper", - "subtask_index": 63 + "object_name": "towel", + "level1": "clothing", + "level2": "towel", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the bread slice on the dish with the left gripper", - 
"subtask_index": 64 + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the bread slice with the left gripper", - "subtask_index": 65 + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the yogurt with the left gripper", - "subtask_index": 66 + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blue pot on the dish with the left gripper", - "subtask_index": 67 + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the blackboard erasure with the right gripper", - "subtask_index": 68 + "object_name": "bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the coke on the dish with the left gripper", - "subtask_index": 69 + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the square wooden block on the dish with the left gripper", - "subtask_index": 70 + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the yogurt on the dish with the left gripper", - "subtask_index": 71 + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the chocolate cake on the dish with the left gripper", - "subtask_index": 72 + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the compasses on the dish with the 
left gripper", - "subtask_index": 73 + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the round wooden block on the dish with the left gripper", - "subtask_index": 74 + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the chocolate on the dish with the left gripper", - "subtask_index": 75 + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the round bread with the left gripper", - "subtask_index": 76 + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the chocolate with the left gripper", - "subtask_index": 77 + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the square wooden block with the left gripper", - "subtask_index": 78 + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the blue cup with the right gripper", - "subtask_index": 79 + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the soft facial cleanser with the right gripper", - "subtask_index": 80 + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the shower sphere on the dish with the left gripper", - "subtask_index": 81 + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the tin on 
the dish with the left gripper", - "subtask_index": 82 + "object_name": "soft_cleanser", + "level1": "daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the square wooden block with the right gripper", - "subtask_index": 83 + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the hard facial cleanser on the dish with the left gripper", - "subtask_index": 84 + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blackboard erasure on the dish with the right gripper", - "subtask_index": 85 + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Place the blue pot on the dish with the right gripper", - "subtask_index": 86 - }, + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-12223", + "dataset_size": "348.2MB", + "statistics": { + "total_episodes": 40, + "total_frames": 12223, + "total_tasks": 1, + "total_videos": 160, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "da6ac6c0-2744-4f18-98b8-d87cbfcc8ce1", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Grasp the red duck with right gripper", + "Static", + "Place the red duck on the table with right gripper", + "Grasp the red duck with left gripper", + "End", + "Place the red duck on the table with left gripper", + "Abnormal", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + 
"gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_move_the_position_of_the_duck_qced_hardlink/\n├── 
annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_duck_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "RMC-AIDA-L_place_test_tube": { + "path": "RMC-AIDA-L_place_test_tube", + "dataset_name": "place_test_tube", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick", + "move" + ], + "tasks": "Place the test tube on the test tube rack with the left gripper ", + "objects": [ { - "subtask": "Grasp the round wooden block with the right gripper", - "subtask_index": 87 + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Grasp the banana with the right gripper", - "subtask_index": 88 + "object_name": "test_tube_rack", + "level1": "container", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "null", - "subtask_index": 89 + "object_name": "test_tube", + "level1": "container", + "level2": "test_tube", + "level3": null, + "level4": null, + "level5": null } ], - "atomic_actions": [ - "grasp", - "pick", - "place" + "operation_platform_height": 77.2, + "frame_range": "0-365863", + "dataset_size": "4.0GB", + "statistics": { + "total_episodes": 648, + "total_frames": 365863, + "total_tasks": 1, + "total_videos": 1944, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 
30 + }, + "dataset_uuid": "7ed8e496-1917-4844-a0f7-98fe52c1477d", + "language": [ + "en", + "zh" ], - "robot_name": [ - "Galaxea_R1_Lite" + "task_categories": [ + "robotics" ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "sub_tasks": [ + "Place the test tube on the test tube rack with the left gripper ", + "End", + "Pass the test tube from the left gripper to the right gripper", + "pick up the test tube with the right gripper ", + "Place the test tubes on the test tube rack with the left gripper", + "move the test tube from the right gripper to the left gripper ", + "Pick up the test tube with the right gripper", + "Grasp the arch-shaped block with the left gripper", + "Pick up the test tube with the left gripper", + "Place the test tubes on the test tube rack with the right gripper", + "Pass the test tube from the right gripper to the left gripper", + "null" ], - "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + 
"annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - 
"gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" + "data_schema": "RMC-AIDA-L_place_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "RMC-AIDA-L_place_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "leju_robot_moving_parts_m": { + "path": "leju_robot_moving_parts_m", + "dataset_name": "moving_parts_m", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the black part on the table with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "parts", + "level1": "mechanical_parts", + "level2": "parts", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } ], + "operation_platform_height": null, + "frame_range": "0-742399", + "dataset_size": "50.9GB", "statistics": { - "total_episodes": 101, - "total_frames": 26346, - "fps": 30, - "total_tasks": 90, - "total_videos": 404, + "total_episodes": 490, + "total_frames": 742399, + "total_tasks": 1, + "total_videos": 1470, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, - "camera_views": 4, - "dataset_size": "993.15 MB" + "fps": 30 }, - "frame_num": 26346, - "dataset_size": "993.15 MB", - "data_structure": "Galaxea_R1_Lite_storage_object_dish_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:100" + "dataset_uuid": "fb9bf451-cf8b-4da5-b5c1-13529609f049", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the black part on the table with right gripper", + "Pick up the large material from the shelf.", + "Insert the large material into the corresponding slot on the workbench.", + "End", + "Grasp the black part with right gripper", + "Move the large material to the workbench.", + "Move to the initial position of the shelf.", + "Move to the table behind body", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, - "features": { - "observation.images.cam_head_left_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - 
"video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_head_right_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } - }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 720, - 1280, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 720, - "video.width": 1280, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, 
Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "leju_robot_moving_parts_m_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_moving_parts_m_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_stacking_blocks": { + "path": "AIRBOT_MMK2_stacking_blocks", + "dataset_name": "stacking_blocks", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the yellow cube on the orange cube with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, - "observation.state": { - "dtype": "float32", - "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - 
"left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "action": { - "dtype": "float32", - "shape": [ - 14 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" - ] - }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null - }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null - }, - "subtask_annotation": { - "names": null, - "shape": [ - 5 - ], - "dtype": "int32" - }, - "scene_annotation": { - "names": null, - "shape": [ - 1 - ], - "dtype": "int32" - }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], 
- "shape": [ - 12 - ], - "dtype": "float32" - }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" + { + "object_name": "building_block", + "level1": "toy", + "level2": "building_block", + "level3": null, + "level4": null, + "level5": null } + ], + "operation_platform_height": 77.2, + "frame_range": "0-52766", + "dataset_size": "2.2GB", + 
"statistics": { + "total_episodes": 213, + "total_frames": 52766, + "total_tasks": 5, + "total_videos": 852, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "cdff2820-6ed6-414f-add9-1454a57a81b2", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the yellow cube on the orange cube with the right gripper", + "Static", + "Grasp the blue rhombic cuboid block with the right gripper", + "Grasp the red cuboid block with the left gripper", + "Grasp the blue small cube with the left gripper", + "Place the red small cube on the blue small cube with the right gripper", + "Place the red cuboid block in the center of view with the left gripper", + "Grasp the big red cube with the left gripper", + "Abnormal", + "Place the orange cube on the yellow cube with the right gripper", + "Place the green small cube on the blue small cube with the right gripper", + "Grasp the green small cube with the right gripper", + "Grasp the red small cube with the right gripper", + "Grasp the blue cube with the right gripper", + "Place the blue cube on the red cube with the left gripper", + "Grasp the orange cube with the right gripper", + "Grasp the blue cube with the left gripper", + "Place the blue small cube on the small table with the left gripper", + "End", + "Place the blue rhombic cuboid block on the red cuboid block with the right gripper", + "Place the blue small cube on the yellow small cube with the left gripper", + "Place the big red cube in the center of view with the left gripper", + "Grasp the yellow cube with the right gripper", + "Place theblue cube on the big red cube with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" }, 
"authors": { "contributed_by": [ { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" } ] }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, 
Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": 
"AIRBOT_MMK2_stacking_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_stacking_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_close_drawer_upper": { + "Airbot_MMK2_move_paper_box": { "task_categories": [ "robotics" ], @@ -112916,11 +122732,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_close_drawer_upper", + "dataset_name": "Airbot_MMK2_move_paper_box", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -112928,17 +122744,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", + "object_name": "paper_box", "level1": "home_storage", - "level2": "table", + "level2": "paper_box", "level3": null, "level4": null, "level5": null }, { - "object_name": "three_layer_transparent_drawer", + "object_name": "white_lid", "level1": "laboratory_supplies", - "level2": "three-layer_transparent_drawer", + "level2": "white_lid", "level3": null, "level4": null, "level5": null @@ -112946,58 +122762,69 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "close the upper drawer." 
+ "pick up the cardboard box with both hands and place it on the lid." ], "sub_tasks": [ { - "subtask": "Use the right gripper to touch the topmost layer of the storage cabinet", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "Push the top drawer closed", + "subtask": "Place the phone case box on the white lid with the right gripper", "subtask_index": 1 }, { - "subtask": "Use the right gripper to contact the middle shelf of the cabinet", + "subtask": "Grasp the mouse box with the left gripper", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Abnormal", "subtask_index": 3 }, { - "subtask": "Push the middle drawer closed", + "subtask": "Static", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Grasp the phone case box with the right gripper", "subtask_index": 5 + }, + { + "subtask": "Place the mouse box on the white lid with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ "grasp", - "push" + "pick", + "place" ], "robot_name": [ - "Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": 
"right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -113008,23 +122835,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 173, - "total_frames": 49994, + "total_episodes": 47, + "total_frames": 5070, "fps": 30, - "total_tasks": 6, - "total_videos": 519, + "total_tasks": 8, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "522.40 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "190.01 MB" }, - "frame_num": 49994, - "dataset_size": "522.40 MB", - "data_structure": "Agilex_Cobot_Magic_close_drawer_upper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(161 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 5070, + "dataset_size": "190.01 MB", + "data_structure": "Airbot_MMK2_move_paper_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:172" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -113096,10 +122923,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -113108,32 +122958,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + 
"right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -113142,26 +123002,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -113312,66 +123182,6 @@ 2 ], "dtype": "int32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - 
"gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] } }, "authors": { @@ -113393,9 +123203,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_storage_peach_right": { + "agilex_cobot_magic_pass_object_right_to_left_red_tablecloth": { "task_categories": [ "robotics" ], @@ -113425,11 +123235,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_peach_right", + "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_red_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial_convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -113445,17 +123255,137 @@ "level5": null }, { - "object_name": "brown_basket", - "level1": "home_storage", - "level2": "brown_basket", + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", "level3": null, "level4": null, "level5": null }, { - "object_name": "peach", + "object_name": "banana", "level1": "food", - "level2": "peach", + "level2": "banana", + "level3": null, 
+ "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", 
+ "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_table_cloths", + "level1": "laboratory_supplies", + "level2": "red_table_cloths", "level3": null, "level4": null, "level5": null @@ -113463,575 +123393,201 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the peach in the basket with right arm." + "use the right gripper to pick up the item and transfer it from the right gripper to the left gripper." ], "sub_tasks": [ { - "subtask": "Grasp the peach with right gripper", + "subtask": "The left gripper places milk on the left side of the table", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "The left gripper places anmuxi on the left side of the table", "subtask_index": 1 }, { - "subtask": "Place the peach in the basket with right gripper", + "subtask": "The left gripper places Rubik's Cube on the left side of the table", "subtask_index": 2 }, { - "subtask": "Abnormal", + "subtask": "Pass the blue blackboard erasure to the left gripper", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 4 - } - ], - "atomic_actions": [ - "grasp", - "lift", - "lower" - ], - "robot_name": [ - "Agilex_Cobot_Magic" - ], - "end_effector_type": "two_finger_gripper", - "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", - "sensor_list": [ - "cam_head_rgb", - "cam_left_wrist_rgb", - "cam_right_wrist_rgb" - ], - "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" - }, - "depth_enabled": false, - "coordinate_definition": "right-hand-frame", - "joint_rotation_dim": "radian", - 
"end_rotation_dim": "radian", - "end_translation_dim": "meter", - "annotations": [ - "eef_acc_mag_annotation.jsonl", - "eef_direction_annotation.jsonl", - "eef_velocity_annotation.jsonl", - "gripper_activity_annotation.jsonl", - "gripper_mode_annotation.jsonl", - "scene_annotations.jsonl", - "subtask_annotations.jsonl" - ], - "statistics": { - "total_episodes": 99, - "total_frames": 25876, - "fps": 30, - "total_tasks": 5, - "total_videos": 297, - "total_chunks": 1, - "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "277.18 MB" - }, - "frame_num": 25876, - "dataset_size": "277.18 MB", - "data_structure": "Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", - "splits": { - "train": "0:98" - }, - "features": { - "observation.images.cam_head_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } }, - "observation.images.cam_left_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "subtask": "Pass the eggplant to the left gripper", + "subtask_index": 5 }, - "observation.images.cam_right_wrist_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } + { + "subtask": "Pass the Rubik's Cube to the left gripper", + "subtask_index": 6 }, - "observation.state": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - 
"left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] + { + "subtask": "The left gripper places yogurt on the left side of the table", + "subtask_index": 7 }, - "action": { - "dtype": "float32", - "shape": [ - 26 - ], - "names": [ - "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" - ] + { + "subtask": "Pass the square chewing gum to the left gripper", + "subtask_index": 8 }, - "timestamp": { - "dtype": "float32", - "shape": [ - 1 - ], - "names": null + { + "subtask": "Use the right gripper to grab the grape on the right side of the table", + "subtask_index": 9 }, - "frame_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "subtask": "Pass the yogurt to the left gripper", + "subtask_index": 10 }, - "episode_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "subtask": "The left gripper places eggplant on the left side of the table", + "subtask_index": 11 }, - "index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "subtask": "The left 
gripper places grape on the left side of the table", + "subtask_index": 12 }, - "task_index": { - "dtype": "int64", - "shape": [ - 1 - ], - "names": null + { + "subtask": "Use the right gripper to grab the milk on the right side of the table", + "subtask_index": 13 }, - "subtask_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 5 - ] + { + "subtask": "Pass the milk to the left gripper", + "subtask_index": 14 }, - "scene_annotation": { - "names": null, - "dtype": "int32", - "shape": [ - 1 - ] + { + "subtask": "Use the right gripper to grab the eggplant on the right side of the table", + "subtask_index": 15 }, - "eef_sim_pose_state": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] + { + "subtask": "The left gripper places xx on the left side of the table", + "subtask_index": 16 }, - "eef_sim_pose_action": { - "names": [ - "left_eef_pos_x", - "left_eef_pos_y", - "left_eef_pos_z", - "left_eef_rot_x", - "left_eef_rot_y", - "left_eef_rot_z", - "right_eef_pos_x", - "right_eef_pos_y", - "right_eef_pos_z", - "right_eef_rot_x", - "right_eef_rot_y", - "right_eef_rot_z" - ], - "dtype": "float32", - "shape": [ - 12 - ] + { + "subtask": "Pass the shower sphereto the left gripper", + "subtask_index": 17 }, - "eef_direction_state": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", + "subtask_index": 18 }, - "eef_direction_action": { - "names": [ - "left_eef_direction", - "right_eef_direction" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Use the right gripper to grab the Anmuxi on the right side of the table.", + "subtask_index": 19 }, 
- "eef_velocity_state": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Use the right gripper to grab the xx on the right side of the table", + "subtask_index": 20 }, - "eef_velocity_action": { - "names": [ - "left_eef_velocity", - "right_eef_velocity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "The left gripper places shower sphere on the left side of the table", + "subtask_index": 21 }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Use the right gripper to grab the blue blackboard erasure on the right side of the table", + "subtask_index": 22 }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Use the right gripper to grab the plush banana on the right side of the table", + "subtask_index": 23 }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Pass the plush banana to the left gripper", + "subtask_index": 24 }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Pass the shower sphere to the left gripper", + "subtask_index": 25 }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "Pass the to the yogurt left gripper", + "subtask_index": 26 }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] + { + "subtask": "The left gripper places blue blackboard erasure on the left side of the table", + "subtask_index": 27 }, - "gripper_open_scale_state": { - "names": [ - 
"left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + { + "subtask": "The left gripper places plush banana on the left side of the table", + "subtask_index": 28 }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran 
Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "Airbot_MMK2_storage_bell_pepper": { - "task_categories": [ - "robotics" - ], - "language": [ - "en" - ], - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "license": "apache-2.0", - "configs": [ { - "config_name": "default", - "data_files": "data/chunk-{id}/episode_{id}.parquet" - } - ], - "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", - "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + "subtask": "Use the right gripper to grab the square chewing gum on the right side of the table", + "subtask_index": 29 + }, + { + "subtask": "The left gripper places square chewing gum on the left side of the table", + "subtask_index": 30 }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_bell_pepper", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ { - "object_name": "red_bell_pepper", - "level1": "vegetables", - "level2": "red_bell_pepper", - "level3": null, - "level4": null, - "level5": null + "subtask": "The left gripper places eyeglass case on the left side of the table", + "subtask_index": 31 }, { - "object_name": "yellow_bell_pepper", - "level1": "vegetables", - "level2": "yellow_bell_pepper", - "level3": null, - "level4": null, - "level5": null + "subtask": "Pass the xx to the left gripper", + "subtask_index": 32 }, { - "object_name": "white_basket", - "level1": "basket", - "level2": "white_basket", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "put the red peppers and yellow peppers into the box." 
- ], - "sub_tasks": [ + "subtask": "Pass the eyeglass case to the left gripper", + "subtask_index": 33 + }, { - "subtask": "Place the yellow pepper into the left compartment of the storage box with the left gripper", - "subtask_index": 0 + "subtask": "Use the right gripper to grab the yogurt on the right side of the table", + "subtask_index": 34 }, { - "subtask": "End", - "subtask_index": 1 + "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", + "subtask_index": 35 }, { - "subtask": "Grasp the green pepper with the right gripper", - "subtask_index": 2 + "subtask": "Pass the yogurt to the left gripper", + "subtask_index": 36 }, { - "subtask": "Place the green pepper into the right compartment of the storage box with the right gripper", - "subtask_index": 3 + "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", + "subtask_index": 37 }, { - "subtask": "Grasp the yellow pepper with the left gripper", - "subtask_index": 4 + "subtask": "Pass the anmuxi to the left gripper", + "subtask_index": 38 + }, + { + "subtask": "Pass the grape to the left gripper", + "subtask_index": 39 }, { "subtask": "null", - "subtask_index": 5 + "subtask_index": 40 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower", + "handover", + "takeover" ], "robot_name": [ - "Airbot_MMK2" + "agilex_cobot_magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, 
resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -114042,23 +123598,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 7525, + "total_episodes": 99, + "total_frames": 58861, "fps": 30, - "total_tasks": 6, - "total_videos": 192, + "total_tasks": 41, + "total_videos": 297, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "200.59 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "1.43 GB" }, - "frame_num": 7525, - "dataset_size": "200.59 MB", - "data_structure": "Airbot_MMK2_storage_bell_pepper_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 58861, + "dataset_size": "1.43 GB", + "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:47" + "train": "0:98" }, "features": { "observation.images.cam_head_rgb": { @@ -114130,33 +123686,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -114165,42 +123698,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -114209,36 +123732,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -114278,17 +123791,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -114305,10 +123818,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": 
"float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -114325,70 +123838,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -114410,11 +123983,11 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_pour_water_bottle": { - "path": "Cobot_Magic_pour_water_bottle", - "dataset_name": "pour_water_bottle", + "R1_Lite_catch_the_water": { + "path": "R1_Lite_catch_the_water", + "dataset_name": "catch_the_water", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -114423,10 +123996,11 @@ "atomic_actions": [ "grasp", "pick", - "pour", - "place" + "place", + "open", + "close" ], - "tasks": "End", + "tasks": "Place the cup next to the water dispenser", "objects": [ { "object_name": "table", @@ -114437,9 +124011,9 @@ "level5": null }, { - "object_name": "bottle", + "object_name": "water_dispenser", "level1": "container", - "level2": "bottle", + "level2": "water_dispenser", "level3": null, "level4": null, "level5": null @@ -114451,29 +124025,128 @@ "level3": null, "level4": null, "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-133503", + "dataset_size": "5.6GB", + "statistics": { + "total_episodes": 148, + "total_frames": 133503, + "total_tasks": 1, + "total_videos": 444, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "071ba9f6-2258-439b-82bd-6fec0968ae25", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the cup next to the water dispenser", + "abnormal", + "Pick up the cup", + "Turn off the faucet", + "Place it under the water dispenser", + "Turn on the faucet", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + 
"gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_catch_the_water_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_catch_the_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars": { + "path": "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars", + "dataset_name": "place_the_glasses_case_and_gold_bars", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Place the glasses case on the middle of the table with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "object_name": "water", - "level1": "drink", - "level2": "water", + "object_name": "glasses_case", + "level1": "container", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "gold_bar", + "level1": "metal", + "level2": "gold_bar", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-43905", - "dataset_size": "1.1GB", + "frame_range": "0-10180", + "dataset_size": "412.7MB", "statistics": { - "total_episodes": 92, - "total_frames": 43905, + "total_episodes": 49, + "total_frames": 10180, "total_tasks": 1, - "total_videos": 276, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "e375b064-04ad-4cb8-a8d4-d0994c11fa75", + "dataset_uuid": "fc8bbdff-5847-4a9e-b1c1-634bab1b4ed7", "language": [ "en", "zh" @@ -114482,14 +124155,13 @@ "robotics" ], "sub_tasks": [ + "Place the glasses case on the middle of the table with the left gripper", "End", - "Grasp the mineral water bottle", - "Pour water into the cup", - "Anomaly", + "Static", + "Place the gold bar on the glasses case with the right gripper", + "Grasp the glasses case with 
the left gripper", + "Grasp the gold bar the right gripper", "Abnormal", - "Pour water into the teacup", - "Place the water bottle", - "Pick up the water bottle", "null" ], "annotations": { @@ -114527,10 +124199,121 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_pour_water_bottle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ 
└── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_pour_water_bottle_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_pineapple": { + "AgiBot-g1_box_storage_part_a": { + "path": "AgiBot-g1_box_storage_part_a", + "dataset_name": "box_storage_part_a", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "place", + "pick", + "grasp" + ], + "tasks": "Pick up the IO bracket from the container box.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "part", + "level1": "tool", + "level2": "part", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "box", + "level1": "container", + "level2": "box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tray", + "level1": "container", + "level2": "tray", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-205995", + "dataset_size": "105.7GB", + "statistics": { + "total_episodes": 335, + "total_frames": 205995, + "total_tasks": 1, + "total_videos": 2680, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "24378624-9647-4aad-82ac-d822bb7bc50d", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Pick up 
the IO bracket from the container box.", + "Place the IO bracket into the ingredient slot.", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AgiBot-g1_box_storage_part_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ 
├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_part_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ 
└── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_move_book_front": { "task_categories": [ "robotics" ], @@ -114560,11 +124343,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_pineapple", + "dataset_name": "Airbot_MMK2_move_book_front", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "study_room", "level3": null, "level4": null, "level5": null @@ -114572,17 +124355,9 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "pineapple", - "level1": "fruit", - "level2": "pineapple", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "book", + "level1": "stationery", + "level2": "book", "level3": null, "level4": null, "level5": null @@ -114590,24 +124365,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the pineapple with right hand and put it into the storage box." + "pick up the book with your left hand and hand it to your right hand, then place it on top of another book." 
], "sub_tasks": [ { - "subtask": "Place the pineapple into the storage box with the right gripper", + "subtask": "Deliver the yellow book from left gripper to right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the yellow book on the white book with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the pineapple with the right gripper", + "subtask": "Grasp the yellow book with the left gripper", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Static", "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], "atomic_actions": [ @@ -114648,20 +124431,20 @@ ], "statistics": { "total_episodes": 49, - "total_frames": 5219, + "total_frames": 10519, "fps": 30, - "total_tasks": 4, + "total_tasks": 6, "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "140.76 MB" + "dataset_size": "396.69 MB" }, - "frame_num": 5219, - "dataset_size": "140.76 MB", - "data_structure": "Airbot_MMK2_storage_pineapple_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 10519, + "dataset_size": "396.69 MB", + "data_structure": "Airbot_MMK2_move_book_front_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { "train": "0:48" }, @@ -114972,446 +124755,54 @@ ], "shape": [ 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi 
Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "Cobot_Magic_desktop_organization": { - "path": "Cobot_Magic_desktop_organization", - "dataset_name": "desktop_organization", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Hand over the waste paper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "waste_paper", - "level1": "garbage", - "level2": "waste_paper", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": 
"container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "knife", - "level1": "office_supplies", - "level2": "knife", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ruler", - "level1": "office_supplies", - "level2": "ruler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "small_pieces_of_paper", - "level1": "garbage", - "level2": "small_pieces_of_paper", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee", - "level1": "drink", - "level2": "coffee", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pencil_sharpener", - "level1": "office_supplies", - "level2": "pencil_sharpener", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - "level1": "fruit", - "level2": "grape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_sheet", - "level1": "garbage", - "level2": "plastic_sheet", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "teacup", - "level1": "container", - "level2": "teacup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "colored_glue", - "level1": "office_supplies", - "level2": "colored_glue", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "scissor", - "level1": "office_supplies", - "level2": "scissor", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mango", - "level1": "fruit", - "level2": "mango", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "fruit_tray", - "level1": 
"container", - "level2": "fruit_tray", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_ball", - "level1": "garbage", - "level2": "paper_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "garbage_bin", - "level1": "container", - "level2": "garbage_bin", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "water_bottle", - "level1": "container", - "level2": "water_bottle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen_holder", - "level1": "garbage", - "level2": "spitball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-2026628", - "dataset_size": "32.3GB", - "statistics": { - "total_episodes": 1070, - "total_frames": 2026628, - "total_tasks": 6, - "total_videos": 3210, - "total_chunks": 2, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "32b7ba5a-b7c4-475c-8bcc-6b60402acd24", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hand over the waste paper", - "Place the red pencil sharpener into the pen holder", - "place the silver glue into the pen holder", - "Place the cola can in the basket with left gripper", - "Place the waste paper in the basket with left gripper", - "Place the eraser in pen holder with left gripper", - "Grasp the waste paper with right gripper", - "Place the pear in the plate", - "Static", - "Hand over the plastic", - "Grasp the bottle with left gripper", - "pick up the ruler", - "Place the black pen 
in pen holder with left gripper", - "Grasp the banana with left gripper", - "Pick up the banana", - "Hand over the banana", - "Pick up the pen with right arm", - "Place the cola can in the basket with right gripper", - "Hand over the grapes", - "Place the basket on the table with right gripper", - "Pick up the red pencil sharpener", - "Stand the bottle with left gripper", - "Pick up the waste paper", - "Grasp the blue pen with left gripper", - "pick up the yellow knife", - "Place the eraser into the pen holder", - "Place the black pen in pen holder with right gripper", - "Place the gray pen in pen holder with right gripper", - "Place the gray pen in pen holder with left gripper", - "Place the milk in the basket with right gripper", - "pick up the red glue", - "Hand over the yellow glue", - "Hand over the bottle", - "Place the plastic in the basket", - "Pick up the grapes", - "pick up the pink scissors", - "pick up the silver glue", - "Abnormal", - "Place the blue pen in pen holder with right gripper", - "Place the blue pen in pen holder with left gripper", - "Place the yellow pen in pen holder with right gripper", - "Grasp the cola can with right gripper", - "Grasp the mango with left gripper", - "Grasp the black pen with left gripper", - "Grasp the blue pen with right gripper", - "Grasp the eraser with right gripper", - "Pick up the plastic", - "Place the waste paper in the basket with right gripper", - "Grasp the cola can with left gripper", - "Place the cola can on the table with left gripper", - "Pick up the eraser", - "Grasp the waste paper with left gripper", - "Grasp the yellow pen with right gripper", - "Place the pink scissors into the pen holder", - "Lift the basket with left gripper", - "Pick up the bottle with the right arm", - "Hand over the bread", - "Place the grapes in the plate", - "transfer the pen to the left arm", - "transfer the bottle to the left arm", - "Hand over the red glue", - "End", - "Hand over the yellow knife", - "Hand over the 
ruler", - "Place the blue scissors into the pen holder", - "Place the yellow pen in pen holder with left gripper", - "Hand over the blue knife", - "Place the pen into the pen holder", - "Pick up the pear", - "transfer the pen to the right arm", - "Place the blue knife into the pen holder", - "Place the mango in the plate with left gripper", - "pick up the yellow glue", - "Hand over the pear", - "Place the blue pencil sharpener into the pen holder", - "Grasp the black pen with right gripper", - "Hand over the pink scissors", - "Pick up the bottle with the left arm", - "Grasp the gray pen with right gripper", - "Grasp the eraser with left gripper", - "Place the eraser in pen holder with right gripper", - "Grasp the basket with left gripper", - "Pick up the pen with left arm", - "Pick up the blue pencil sharpener", - "pick up the bread", - "Hand over the pen", - "Place the milk in the basket with left gripper", - "place the red glue into the pen holder", - "Place the banana in the plate with left gripper", - "Hand the basket to the right gripper with the left gripper", - "place the bread in the plate", - "Grasp the yellow pen with left gripper", - "Hand over the blue scissors", - "place the yellow glue into the pen holder", - "pick up the blue knife", - "Place the cola can on the table with right gripper", - "Place the bottle", - "Grasp the gray pen with left gripper", - "Place the ruler into the pen holder", - "pick up the blue scissors", - "Grasp the milk with right gripper", - "Place the banana in the plate", - "Grasp the milk with left gripper", - "Place the basket on the table with left gripper", - "Hand over the silver glue", - "Place the waste paper in the basket", - "place the yellow knife into the pen holder", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": 
"auto_generated", - "gripper_activity": "auto_generated" + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, 
Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_desktop_organization_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)", - "structure": "Cobot_Magic_desktop_organization_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ ├── chunk-000/\n│ │ ├── episode_000000.parquet\n│ │ ├── episode_000001.parquet\n│ │ ├── episode_000002.parquet\n│ │ ├── episode_000003.parquet\n│ │ ├── episode_000004.parquet\n│ │ └── (...)\n│ └── chunk-001/\n│ ├── episode_001000.parquet\n│ ├── episode_001001.parquet\n│ ├── episode_001002.parquet\n│ ├── episode_001003.parquet\n│ ├── episode_001004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n ├── chunk-000/\n │ ├── observation.images.cam_high_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ ├── observation.images.cam_left_wrist_rgb/\n │ │ ├── episode_000000.mp4\n │ │ ├── episode_000001.mp4\n │ │ ├── episode_000002.mp4\n │ │ ├── episode_000003.mp4\n │ │ ├── episode_000004.mp4\n │ │ └── (...)\n │ └── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── chunk-001/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_001000.mp4\n │ ├── episode_001001.mp4\n │ ├── episode_001002.mp4\n │ ├── episode_001003.mp4\n │ ├── episode_001004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_001000.mp4\n ├── episode_001001.mp4\n ├── episode_001002.mp4\n ├── episode_001003.mp4\n ├── episode_001004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: 
https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AgiBot-g1_box_storage_a": { - "path": "AgiBot-g1_box_storage_a", - "dataset_name": "box_storage_a", + "Cobot_Magic_plate_storage_bread": { + "path": "Cobot_Magic_plate_storage_bread", + "dataset_name": "plate_storage_bread", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -115419,10 +124810,10 @@ "scene_type": [], "atomic_actions": [ "grasp", - "place", - "pick" + "pick", + "place" ], - "tasks": "Place the mouse and the power cord paper box into the container.", + "tasks": "Grasp the yellow bread", "objects": [ { "object_name": "table", @@ -115433,27 +124824,35 @@ "level5": null }, { - "object_name": "box", + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", "level1": "container", - "level2": "box", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-11770", - "dataset_size": "4.8GB", + "operation_platform_height": 77.2, + "frame_range": "0-27875", + "dataset_size": "515.0MB", "statistics": { - "total_episodes": 21, - "total_frames": 11770, + "total_episodes": 100, + "total_frames": 27875, "total_tasks": 1, - "total_videos": 168, + "total_videos": 300, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "a1e4a24d-cfb7-4719-a86c-d455555bc395", + "dataset_uuid": "f4001f44-f88b-4812-a364-a7f937d58496", "language": [ "en", "zh" @@ -115462,8 +124861,8 @@ "robotics" ], "sub_tasks": [ - "Place the mouse and the power cord paper box into the container.", - "Pick up the mouse and the power cord paper box.", + "Grasp the yellow 
bread", + "Place the bread into the yellow plate", "null" ], "annotations": { @@ -115501,61 +124900,70 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── 
videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ 
├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_plate_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_plate_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "alpha_bot_2_press_the_button_b": { - "path": "alpha_bot_2_press_the_button_b", - "dataset_name": "press_the_button_b", + "leju_robot_box_storage_parcel_h": { + "path": "leju_robot_box_storage_parcel_h", + "dataset_name": "box_storage_parcel_h", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ - "pressbutton", - "push" + "grasp", + "pick", + "place" ], - "tasks": "Touch the bottle with left gripper", + "tasks": "Place the package into the parcel locker.", "objects": [ { - "object_name": "mineral_water", - "level1": "drinks", - "level2": "mineral_water", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "button", - "level1": "toy", - "level2": "button", + "object_name": "box", + "level1": "home_storage", + "level2": "box", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "parcel", + "level1": "container", + "level2": "parcel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "conveyor_belt", + "level1": "industrial_equipment", + "level2": "conveyor_belt", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-50921", - "dataset_size": "943.7MB", + "frame_range": "0-149279", + "dataset_size": "7.7GB", "statistics": { - "total_episodes": 65, - "total_frames": 50921, + "total_episodes": 389, + "total_frames": 149279, "total_tasks": 1, - "total_videos": 260, + "total_videos": 1167, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "627d6f91-a0ba-46b7-96dd-616242c3a6af", + "dataset_uuid": "6dd99999-811b-4ae1-aac2-7a5b81087eaa", 
"language": [ "en", "zh" @@ -115564,11 +124972,10 @@ "robotics" ], "sub_tasks": [ - "Touch the bottle with left gripper", - "End", - "Move the bottle away with right gripper", - "Abnormal", - "Press the button with right gripper", + "Place the package into the parcel locker.", + "Pick up the package from the inbound machine.", + "Pick up the package from the conveyor belt.", + "Place the package onto the inbound machine.", "null" ], "annotations": { @@ -115606,21 +125013,20 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_head_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "leju_robot_box_storage_parcel_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "leju_robot_box_storage_parcel_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── 
chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AgiBot-g1_box_storage_tool": { - "path": "AgiBot-g1_box_storage_tool", - "dataset_name": "box_storage_tool", + "G1edu-u3_pick_cup_a": { + "path": "G1edu-u3_pick_cup_a", + "dataset_name": "pick_cup_a", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick", - "place" + "pick" ], "tasks": "End", "objects": [ @@ -115633,35 +125039,27 @@ "level5": null }, { - "object_name": "box", - "level1": "home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tool", - "level1": "tool", - "level2": "tool", + "object_name": "cup", + "level1": "kitchen_supplies", + "level2": "cup", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-32860", - "dataset_size": "1.2GB", + "frame_range": "0-8046", + "dataset_size": "88.4MB", "statistics": { - "total_episodes": 99, - "total_frames": 32860, + "total_episodes": 24, + "total_frames": 8046, "total_tasks": 1, - "total_videos": 297, + "total_videos": 24, "total_chunks": 1, - 
"chunks_size": 1000, + "chunks_size": 24, "fps": 30 }, - "dataset_uuid": "4ed6d684-19e6-4881-9b9f-eabbb1a6789d", + "dataset_uuid": "6c4e6e81-bded-43dc-a803-02147d6eb107", "language": [ "en", "zh" @@ -115671,9 +125069,8 @@ ], "sub_tasks": [ "End", - "Abnormal", - "Grasp the data cable", - "Place the data cable in the another box", + "Grasp the paper cup and lift it to the center of the view with right gripper", + "Grasp the paper cup and lift it to the center of the view with left gripper", "null" ], "annotations": { @@ -115711,452 +125108,581 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_tool_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_tool_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_pick_cup_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_pick_cup_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "R1_Lite_move_the_position_of_the_duck": { - "path": "R1_Lite_move_the_position_of_the_duck", - "dataset_name": "move_the_position_of_the_duck", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" + 
"Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth": { + "task_categories": [ + "robotics" ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" + "language": [ + "en" ], - "tasks": "Grasp the red duck with right gripper", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office & workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", + "object_name": "deli_water-based_marker", + "level1": "stationery", + "level2": "deli_water-based_marker", 
"level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", + "object_name": "notebook", + "level1": "stationery", + "level2": "notebook", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "mouse", + "level1": "appliances", + "level2": "mouse", "level3": null, "level4": null, "level5": null }, { - "object_name": "can", - "level1": "container", - "level2": "can", + "object_name": "mouse_pad", + "level1": "appliances", + "level2": "mouse_pad", "level3": null, "level4": null, "level5": null }, { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", + "object_name": "green_table_cloths", + "level1": "laboratory_supplies", + "level2": "green_table_cloths", "level3": null, "level4": null, "level5": null - }, + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." 
+ ], + "sub_tasks": [ { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the mouse on the mouse pad with the left gripper", + "subtask_index": 0 }, { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the marker with the left gripper", + "subtask_index": 1 }, { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the mouse with the right gripper", + "subtask_index": 2 }, { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the mouse on the mouse pad with the right gripper", + "subtask_index": 3 }, { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the marker on the notebook with the right gripper", + "subtask_index": 4 }, { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the marker with the right gripper", + "subtask_index": 5 }, { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null + "subtask": "Grasp the mouse with the left gripper", + "subtask_index": 6 }, { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null + "subtask": "end", + "subtask_index": 7 }, { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null + "subtask": "Place the marker on the notebook with the left gripper", + "subtask_index": 8 }, { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": 
"detergent", - "level3": null, - "level4": null, - "level5": null + "subtask": "null", + "subtask_index": 9 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 30, + "total_frames": 19527, + "fps": 30, + "total_tasks": 10, + "total_videos": 90, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "316.01 MB" + }, + "frame_num": 19527, + "dataset_size": "316.01 MB", + "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- 
episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:29" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": null + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + 
"video.channels": 3, + "has_audio": false + } }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null + "timestamp": { + "dtype": 
"float32", + "shape": [ + 1 + ], + "names": null }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - "level3": null, - "level4": null, - "level5": null + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - 
"level3": null, - "level4": null, - "level5": null + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } - ], - "operation_platform_height": 77.2, - "frame_range": "0-12223", - "dataset_size": "348.2MB", - "statistics": { - "total_episodes": 40, - "total_frames": 12223, - "total_tasks": 1, - "total_videos": 160, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "da6ac6c0-2744-4f18-98b8-d87cbfcc8ce1", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - 
"sub_tasks": [ - "Grasp the red duck with right gripper", - "Static", - "Place the red duck on the table with right gripper", - "Grasp the red duck with left gripper", - "End", - "Place the red duck on the table with left gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, 
Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_duck_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"R1_Lite_move_the_position_of_the_duck_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "RMC-AIDA-L_place_test_tube": { - "path": "RMC-AIDA-L_place_test_tube", - "dataset_name": "place_test_tube", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, 
Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "AgiBot-g1_box_storage_cardboard_box_b": { + "path": "AgiBot-g1_box_storage_cardboard_box_b", + "dataset_name": "box_storage_cardboard_box_b", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", "place", "pick", - "move" + "grasp" ], - "tasks": "Place the test tube on the test tube rack with the left gripper ", + "tasks": "Place the mouse and the power cord paper box into the container.", "objects": [ { "object_name": "table", @@ -116167,35 +125693,35 @@ "level5": null }, { - "object_name": "test_tube_rack", - 
"level1": "container", - "level2": "test_tube_rack", + "object_name": "paper_box", + "level1": "tool", + "level2": "paper_box", "level3": null, "level4": null, "level5": null }, { - "object_name": "test_tube", - "level1": "container", - "level2": "test_tube", + "object_name": "carton", + "level1": "tool", + "level2": "carton", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-365863", - "dataset_size": "4.0GB", + "operation_platform_height": null, + "frame_range": "0-115855", + "dataset_size": "56.9GB", "statistics": { - "total_episodes": 648, - "total_frames": 365863, + "total_episodes": 247, + "total_frames": 115855, "total_tasks": 1, - "total_videos": 1944, + "total_videos": 1976, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "7ed8e496-1917-4844-a0f7-98fe52c1477d", + "dataset_uuid": "7e28f5b3-7a48-4d52-b01e-390390932ffd", "language": [ "en", "zh" @@ -116204,17 +125730,8 @@ "robotics" ], "sub_tasks": [ - "Place the test tube on the test tube rack with the left gripper ", - "End", - "Pass the test tube from the left gripper to the right gripper", - "pick up the test tube with the right gripper ", - "Place the test tubes on the test tube rack with the left gripper", - "move the test tube from the right gripper to the left gripper ", - "Pick up the test tube with the right gripper", - "Grasp the arch-shaped block with the left gripper", - "Pick up the test tube with the left gripper", - "Place the test tubes on the test tube rack with the right gripper", - "Pass the test tube from the right gripper to the left gripper", + "Place the mouse and the power cord paper box into the container.", + "Pick up the mouse and the power cord paper box.", "null" ], "annotations": { @@ -116252,15 +125769,514 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, 
Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_place_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_place_test_tube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_box_storage_cardboard_box_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_cardboard_box_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "leju_robot_moving_parts_m": { - "path": "leju_robot_moving_parts_m", - "dataset_name": "moving_parts_m", + 
"Galaxea_R1_Lite_change_baai_into_brain": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_change_baai_into_brain", + "dataset_uuid": "75dad2ee-672e-402a-823a-198e5e42af62", + "scene_type": { + "level1": "education", + "level2": "school", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "building_blocks", + "level1": "toys", + "level2": "building_blocks", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "rearrange the word baai as brain." 
+ ], + "sub_tasks": [ + { + "subtask": "Disassemble the second character A", + "subtask_index": 0 + }, + { + "subtask": "Place the character R between first character B and third character A", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "Place the character N after character I", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 67, + "total_frames": 83804, + "fps": 30, + "total_tasks": 5, + "total_videos": 268, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "3.61 GB" + }, + "frame_num": 83804, + "dataset_size": "3.61 
GB", + "data_structure": "Galaxea_R1_Lite_change_baai_into_brain_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (55 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:66" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], 
+ "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + 
"shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + 
"left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, 
Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, + "R1_Lite_put_the_tableware_into_the_cupboard": { + "path": "R1_Lite_put_the_tableware_into_the_cupboard", + "dataset_name": "put_the_tableware_into_the_cupboard", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ @@ -116268,46 +126284,70 @@ "pick", "place" ], - "tasks": "Place the black part on the table with right gripper", + "tasks": "Place the fork in the cutlery box", "objects": [ { - "object_name": "table", + "object_name": "cabinet", "level1": "furniture", - "level2": "table", + "level2": "cabinet", "level3": null, "level4": null, "level5": null }, { - "object_name": "parts", - "level1": "mechanical_parts", - "level2": "parts", + "object_name": "bowl", + "level1": "container", + "level2": "bowl", "level3": null, "level4": null, "level5": null }, { - "object_name": "cabinet", - "level1": "home_storage", - "level2": "cabinet", + "object_name": "chopsticks", + "level1": "tableware", + "level2": "chopsticks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": 
"tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "fork", + "level1": "tableware", + "level2": "fork", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-742399", - "dataset_size": "50.9GB", + "frame_range": "0-367607", + "dataset_size": "15.5GB", "statistics": { - "total_episodes": 490, - "total_frames": 742399, + "total_episodes": 143, + "total_frames": 367607, "total_tasks": 1, - "total_videos": 1470, + "total_videos": 429, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "fb9bf451-cf8b-4da5-b5c1-13529609f049", + "dataset_uuid": "49bc9e7b-4491-4846-95a2-082b0e4f9178", "language": [ "en", "zh" @@ -116316,14 +126356,33 @@ "robotics" ], "sub_tasks": [ - "Place the black part on the table with right gripper", - "Pick up the large material from the shelf.", - "Insert the large material into the corresponding slot on the workbench.", + "Place the fork in the cutlery box", + "Place the spoon on the plate", + "Take the bowl out of the cupboard", + "Pick up the bowl and place it in the cupboard", + "Take the chopsticks out of the cupboard", + "Pull apart the shelf", + "Pick up the chopsticks and place it in the cupboard", "End", - "Grasp the black part with right gripper", - "Move the large material to the workbench.", - "Move to the initial position of the shelf.", - "Move to the table behind body", + "Take the plate out of the cupboard", + "Place the bowl on the shelf", + "Take the fork out of the cupboard", + "Abnormal", + "Place the plate on the table", + "Pick up the plate and place it in the cupboard", + "Place a chopstick on the table", + "Place the plate on the shelf", + "Pull apart the cupboard", + "Place the fork on the table", + "Place a pair of chopsticks in the cutlery box", + "Take the 
spoon out of the cupboard", + "Place the bowl on the plate", + "Place the spoon on the table", + "Place the spoon on the cutlery box", + "Open the cupboard", + "Pick up the spoon and place it in the cupboard", + "Close the cupboard", + "Pick up the fork and place it in the cupboard", "null" ], "annotations": { @@ -116361,23 +126420,24 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_moving_parts_m_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ 
├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_moving_parts_m_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── 
(...)" + "data_schema": "R1_Lite_put_the_tableware_into_the_cupboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_put_the_tableware_into_the_cupboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_stacking_blocks": { - "path": "AIRBOT_MMK2_stacking_blocks", - "dataset_name": "stacking_blocks", + "Cobot_Magic_fold_the_towel": { + "path": "Cobot_Magic_fold_the_towel", + "dataset_name": "fold_the_towel", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", + "place", "pick", - "place" + "fold" ], - "tasks": "Place the yellow cube on the orange cube with the right gripper", + "tasks": "End", "objects": [ { "object_name": "table", @@ -116388,27 +126448,35 @@ "level5": null }, { - "object_name": "building_block", - "level1": "toy", - "level2": "building_block", + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "towel", + "level1": "clothing", + "level2": "towel", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-52766", - "dataset_size": "2.2GB", + "frame_range": "0-235124", + "dataset_size": "5.1GB", "statistics": { - "total_episodes": 213, - "total_frames": 52766, - "total_tasks": 5, - "total_videos": 852, + "total_episodes": 177, + "total_frames": 235124, + "total_tasks": 3, + "total_videos": 531, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "cdff2820-6ed6-414f-add9-1454a57a81b2", + "dataset_uuid": "15a5b235-0337-4484-9e0f-be8def7a8879", "language": [ "en", "zh" @@ -116417,30 +126485,14 @@ "robotics" ], "sub_tasks": [ - "Place the yellow cube on the orange cube with the right gripper", - 
"Static", - "Grasp the blue rhombic cuboid block with the right gripper", - "Grasp the red cuboid block with the left gripper", - "Grasp the blue small cube with the left gripper", - "Place the red small cube on the blue small cube with the right gripper", - "Place the red cuboid block in the center of view with the left gripper", - "Grasp the big red cube with the left gripper", - "Abnormal", - "Place the orange cube on the yellow cube with the right gripper", - "Place the green small cube on the blue small cube with the right gripper", - "Grasp the green small cube with the right gripper", - "Grasp the red small cube with the right gripper", - "Grasp the blue cube with the right gripper", - "Place the blue cube on the red cube with the left gripper", - "Grasp the orange cube with the right gripper", - "Grasp the blue cube with the left gripper", - "Place the blue small cube on the small table with the left gripper", "End", - "Place the blue rhombic cuboid block on the red cuboid block with the right gripper", - "Place the blue small cube on the yellow small cube with the left gripper", - "Place the big red cube in the center of view with the left gripper", - "Grasp the yellow cube with the right gripper", - "Place theblue cube on the big red cube with the right gripper", + "Fold the towel upwards with the both gripper", + "Fold the towel upwards with the right gripper", + "Tidy up the towels", + "Fold the towel from right to left with the right gripper", + "Fold the towel upwards with the left gripper", + "Fold the towel", + "abnormal", "null" ], "annotations": { @@ -116478,10 +126530,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei 
Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_stacking_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ 
├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_stacking_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_fold_the_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_fold_the_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" 
}, - "agilex_cobot_magic_pass_object_right_to_left_red_tablecloth": { + "Airbot_MMK2_storage_cake_both_hands": { "task_categories": [ "robotics" ], @@ -116501,369 +126553,113 @@ ], "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", "extra_gated_fields": { - "Company/Organization": { - "type": "text", - "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" - }, - "Country": { - "type": "country", - "description": "e.g., \"Germany\", \"China\", \"United States\"" - } - }, - "codebase_version": "v2.1", - "dataset_name": "agilex_cobot_magic_pass_object_right_to_left_red_tablecloth", - "dataset_uuid": "00000000-0000-0000-0000-000000000000", - "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", - "level3": null, - "level4": null, - "level5": null - }, - "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", - "objects": [ - { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ambrosial_yogurt", - "level1": "food", - "level2": "ambrosial_yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "food", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "food", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "food", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "grape", - 
"level1": "food", - "level2": "grape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "ham_sausage", - "level1": "food", - "level2": "ham_sausage", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eggplant", - "level1": "food", - "level2": "eggplant", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chewing_gum", - "level1": "food", - "level2": "chewing_gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eyeglass_case", - "level1": "laboratory_supplies", - "level2": "eyeglass_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubik's_cube", - "level1": "toys", - "level2": "rubik's_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "purple_trash_bag", - "level1": "trash", - "level2": "purple_trash_bag", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cleanser", - "level1": "daily_necessities", - "level2": "cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bathing_in_flowers", - "level1": "daily_necessities", - "level2": "bathing_in_flowers", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "whiteboard_eraser", - "level1": "stationery", - "level2": "whiteboard_eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "candle", - "level1": "daily_necessities", - "level2": "candle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_table_cloths", - "level1": "laboratory_supplies", - "level2": "red_table_cloths", - "level3": null, - "level4": null, - "level5": null - } - ], - "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", - "task_instruction": [ - "use the right gripper to pick up the item and transfer it from the right gripper to the left 
gripper." - ], - "sub_tasks": [ - { - "subtask": "The left gripper places milk on the left side of the table", - "subtask_index": 0 - }, - { - "subtask": "The left gripper places anmuxi on the left side of the table", - "subtask_index": 1 - }, - { - "subtask": "The left gripper places Rubik's Cube on the left side of the table", - "subtask_index": 2 - }, - { - "subtask": "Pass the blue blackboard erasure to the left gripper", - "subtask_index": 3 - }, - { - "subtask": "End", - "subtask_index": 4 - }, - { - "subtask": "Pass the eggplant to the left gripper", - "subtask_index": 5 - }, - { - "subtask": "Pass the Rubik's Cube to the left gripper", - "subtask_index": 6 - }, - { - "subtask": "The left gripper places yogurt on the left side of the table", - "subtask_index": 7 - }, - { - "subtask": "Pass the square chewing gum to the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Use the right gripper to grab the grape on the right side of the table", - "subtask_index": 9 - }, - { - "subtask": "Pass the yogurt to the left gripper", - "subtask_index": 10 - }, - { - "subtask": "The left gripper places eggplant on the left side of the table", - "subtask_index": 11 - }, - { - "subtask": "The left gripper places grape on the left side of the table", - "subtask_index": 12 - }, - { - "subtask": "Use the right gripper to grab the milk on the right side of the table", - "subtask_index": 13 - }, - { - "subtask": "Pass the milk to the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Use the right gripper to grab the eggplant on the right side of the table", - "subtask_index": 15 - }, - { - "subtask": "The left gripper places xx on the left side of the table", - "subtask_index": 16 - }, - { - "subtask": "Pass the shower sphereto the left gripper", - "subtask_index": 17 - }, - { - "subtask": "Use the right gripper to grab the shower sphere on the right side of the table", - "subtask_index": 18 - }, - { - "subtask": "Use the right gripper to grab the Anmuxi on the 
right side of the table.", - "subtask_index": 19 - }, - { - "subtask": "Use the right gripper to grab the xx on the right side of the table", - "subtask_index": 20 - }, - { - "subtask": "The left gripper places shower sphere on the left side of the table", - "subtask_index": 21 - }, - { - "subtask": "Use the right gripper to grab the blue blackboard erasure on the right side of the table", - "subtask_index": 22 - }, - { - "subtask": "Use the right gripper to grab the plush banana on the right side of the table", - "subtask_index": 23 - }, - { - "subtask": "Pass the plush banana to the left gripper", - "subtask_index": 24 - }, - { - "subtask": "Pass the shower sphere to the left gripper", - "subtask_index": 25 - }, - { - "subtask": "Pass the to the yogurt left gripper", - "subtask_index": 26 - }, - { - "subtask": "The left gripper places blue blackboard erasure on the left side of the table", - "subtask_index": 27 - }, - { - "subtask": "The left gripper places plush banana on the left side of the table", - "subtask_index": 28 - }, - { - "subtask": "Use the right gripper to grab the square chewing gum on the right side of the table", - "subtask_index": 29 - }, - { - "subtask": "The left gripper places square chewing gum on the left side of the table", - "subtask_index": 30 + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_cake_both_hands", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ { - "subtask": "The left gripper places 
eyeglass case on the left side of the table", - "subtask_index": 31 + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pass the xx to the left gripper", - "subtask_index": 32 + "object_name": "cakes", + "level1": "bread", + "level2": "cakes", + "level3": null, + "level4": null, + "level5": null }, { - "subtask": "Pass the eyeglass case to the left gripper", - "subtask_index": 33 - }, + "object_name": "lid", + "level1": "laboratory_supplies", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put two cakes in the lid." + ], + "sub_tasks": [ { - "subtask": "Use the right gripper to grab the yogurt on the right side of the table", - "subtask_index": 34 + "subtask": "Grasp the cake with the left gripper", + "subtask_index": 0 }, { - "subtask": "Use the right gripper to grab the eyeglass case on the right side of the table", - "subtask_index": 35 + "subtask": "Place the cake on the white basket with the right gripper", + "subtask_index": 1 }, { - "subtask": "Pass the yogurt to the left gripper", - "subtask_index": 36 + "subtask": "Grasp the cake with the right gripper", + "subtask_index": 2 }, { - "subtask": "Use the right gripper to grab the Rubik's Cube on the right side of the table", - "subtask_index": 37 + "subtask": "Place the cake on the white basket with the left gripper", + "subtask_index": 3 }, { - "subtask": "Pass the anmuxi to the left gripper", - "subtask_index": 38 + "subtask": "Static", + "subtask_index": 4 }, { - "subtask": "Pass the grape to the left gripper", - "subtask_index": 39 + "subtask": "End", + "subtask_index": 5 }, { "subtask": "null", - "subtask_index": 40 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", - "lift", - "lower", - "handover", - "takeover" + "pick", + 
"place" ], "robot_name": [ - "agilex_cobot_magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -116874,23 +126670,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 99, - "total_frames": 58861, + "total_episodes": 47, + "total_frames": 4445, "fps": 30, - "total_tasks": 41, - "total_videos": 297, + "total_tasks": 7, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "1.43 GB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "188.05 MB" }, - "frame_num": 58861, - "dataset_size": "1.43 GB", - "data_structure": "Agilex_Cobot_Magic_pass_object_right_to_left_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 4445, + "dataset_size": "188.05 MB", + "data_structure": "Airbot_MMK2_storage_cake_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:98" + "train": "0:46" }, "features": { "observation.images.cam_head_rgb": { @@ -116962,10 +126758,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -116974,32 +126793,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -117008,26 +126837,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -117067,17 +126906,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -117094,10 +126933,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + 
], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -117114,130 +126953,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -117259,24 +127038,22 @@ 
"version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_catch_the_water": { - "path": "R1_Lite_catch_the_water", - "dataset_name": "catch_the_water", + "R1_Lite_move_the_position_of_the_black_marker": { + "path": "R1_Lite_move_the_position_of_the_black_marker", + "dataset_name": "move_the_position_of_the_black_marker", "robot_type": "", "end_effector_type": [ "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", - "pick", "place", - "open", - "close" + "pick", + "grasp" ], - "tasks": "Place the cup next to the water dispenser", + "tasks": "Grasp the Marker pen with left gripper", "objects": [ { "object_name": "table", @@ -117287,224 +127064,105 @@ "level5": null }, { - "object_name": "water_dispenser", - "level1": "container", - "level2": "water_dispenser", + "object_name": "pen", + "level1": "office_supplies", + "level2": "pen", "level3": null, "level4": null, "level5": null }, { - "object_name": "cup", + "object_name": "banana", + "level1": "fruit", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bath_ball", + "level1": "daily_necessities", + "level2": "bath_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", "level1": "container", - "level2": "cup", + "level2": "bowl", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-133503", - "dataset_size": "5.6GB", - "statistics": { - "total_episodes": 148, - "total_frames": 133503, - "total_tasks": 1, - "total_videos": 444, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "071ba9f6-2258-439b-82bd-6fec0968ae25", - 
"language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the cup next to the water dispenser", - "abnormal", - "Pick up the cup", - "Turn off the faucet", - "Place it under the water dispenser", - "Turn on the faucet", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin 
Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_catch_the_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_catch_the_water_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ 
├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars": { - "path": "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars", - "dataset_name": "place_the_glasses_case_and_gold_bars", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place the glasses case on the middle of the table with the left gripper", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "glasses_case", + "object_name": "can", "level1": "container", - "level2": "glasses_case", + "level2": "can", "level3": null, "level4": null, "level5": null }, { - "object_name": "gold_bar", - "level1": "metal", - "level2": "gold_bar", + "object_name": "eraser", + "level1": "office_supplies", + "level2": "eraser", "level3": null, "level4": null, "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-10180", - "dataset_size": "412.7MB", - "statistics": { - "total_episodes": 49, - "total_frames": 10180, - "total_tasks": 1, - "total_videos": 196, - "total_chunks": 1, - "chunks_size": 
1000, - "fps": 30 - }, - "dataset_uuid": "fc8bbdff-5847-4a9e-b1c1-634bab1b4ed7", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the glasses case on the middle of the table with the left gripper", - "End", - "Static", - "Place the gold bar on the glasses case with the right gripper", - "Grasp the glasses case with the left gripper", - "Grasp the gold bar the right gripper", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji 
Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_glasses_case_and_gold_bars_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"AIRBOT_MMK2_place_the_glasses_case_and_gold_bars_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AgiBot-g1_box_storage_part_a": { - "path": "AgiBot-g1_box_storage_part_a", - "dataset_name": "box_storage_part_a", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Pick up the IO bracket from the container box.", - "objects": [ + }, { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "hard_cleanser", + "level1": "daily_necessities", + "level2": "hard_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "part", + "object_name": "long_bread", + 
"level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peeler", "level1": "tool", - "level2": "part", + "level2": "peeler", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toy", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soap", + "level1": "daily_necessities", + "level2": "soap", "level3": null, "level4": null, "level5": null @@ -117518,27 +127176,243 @@ "level5": null }, { - "object_name": "tray", + "object_name": "basket", "level1": "container", - "level2": "tray", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cola", + "level1": "drink", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "detergent", + "level1": "daily_necessities", + "level2": "detergent", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "electrical_appliances", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "towel", + "level1": "clothing", + "level2": "towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "fruit", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruit", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "marker", + "level1": "office_supplies", + "level2": "marker", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubiks_cube", + "level1": "toy", + "level2": "rubiks_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": 
"bread_slice", + "level1": "food", + "level2": "bread_slice", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brush", + "level1": "daily_necessities", + "level2": "brush", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "drink", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electric_appliance", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "drink", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soda", + "level1": "drink", + "level2": "soda", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lime", + "level1": "fruit", + "level2": "lime", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee_capsule", + "level1": "drink", + "level2": "coffee_capsule", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "dish", + "level1": "container", + "level2": "dish", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "glass", + "level1": "furniture", + "level2": "glass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_yolk_pastry", + "level1": "food", + "level2": "egg_yolk_pastry", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "glasses_case", + "level1": "daily_necessities", + "level2": "glasses_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "gum", + "level1": "daily_necessities", + "level2": "gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "daily_necessities", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "soft_cleanser", + "level1": 
"daily_necessities", + "level2": "soft_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chips", + "level1": "food", + "level2": "chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cookie", + "level1": "food", + "level2": "cookie", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-205995", - "dataset_size": "105.7GB", + "operation_platform_height": 77.2, + "frame_range": "0-15618", + "dataset_size": "536.6MB", "statistics": { - "total_episodes": 335, - "total_frames": 205995, + "total_episodes": 61, + "total_frames": 15618, "total_tasks": 1, - "total_videos": 2680, + "total_videos": 244, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "24378624-9647-4aad-82ac-d822bb7bc50d", + "dataset_uuid": "5d591692-f2ae-4fa8-97d1-9cd1613b1b33", "language": [ "en", "zh" @@ -117547,8 +127421,12 @@ "robotics" ], "sub_tasks": [ - "Pick up the IO bracket from the container box.", - "Place the IO bracket into the ingredient slot.", + "Grasp the Marker pen with left gripper", + "Grasp the Marker pen with right gripper", + "Place the Marker pen on the table with right gripper", + "Static", + "End", + "Place the Marker pen on the table with left gripper", "null" ], "annotations": { @@ -117586,10 +127464,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, 
Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_part_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_part_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_move_the_position_of_the_black_marker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_black_marker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_move_book_front": { + "Airbot_MMK2_move_cup_paper_box": { "task_categories": [ "robotics" ], @@ -117619,11 +127497,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_book_front", + "dataset_name": "Airbot_MMK2_move_cup_paper_box", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "study_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -117631,9 +127509,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "book", - "level1": "stationery", - "level2": "book", + "object_name": "paper_boxes", + "level1": "home_storage", + "level2": "paper_boxes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_plate", + "level1": "kitchen_supplies", + "level2": "pink_plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coffee cup", + "level1": "kitchen_supplies", + "level2": "coffee cup", "level3": null, "level4": null, "level5": null @@ -117641,32 +127535,36 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the book with your left hand and hand it to your right hand, then place it on top of another book." + "place the paper box on the paper box with your left hand and put the coffee cup on the plate with your right hand." 
], "sub_tasks": [ { - "subtask": "Deliver the yellow book from left gripper to right gripper", + "subtask": "Grasp the mouse box with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the yellow book on the white book with the right gripper", + "subtask": "Place the mouse box on the calculator box with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the yellow book with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Grasp the coffee cup with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the coffee cup on the white plate with the right gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the coffee cup on the pink plate with the right gripper", "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 } ], "atomic_actions": [ @@ -117706,23 +127604,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 10519, + "total_episodes": 93, + "total_frames": 25588, "fps": 30, - "total_tasks": 6, - "total_videos": 196, + "total_tasks": 7, + "total_videos": 372, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "396.69 MB" + "dataset_size": "929.19 MB" }, - "frame_num": 10519, - "dataset_size": "396.69 MB", - "data_structure": "Airbot_MMK2_move_book_front_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- 
episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 25588, + "dataset_size": "929.19 MB", + "data_structure": "Airbot_MMK2_move_cup_paper_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:48" + "train": "0:92" }, "features": { "observation.images.cam_head_rgb": { @@ -118029,170 +127927,67 @@ "left_eef_velocity", "right_eef_velocity" ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_state": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "eef_acc_mag_action": { - "names": [ - "left_eef_acc_mag", - "right_eef_acc_mag" - ], - "shape": [ - 2 - ], - "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, 
Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "Cobot_Magic_plate_storage_bread": { - "path": "Cobot_Magic_plate_storage_bread", - "dataset_name": "plate_storage_bread", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the yellow bread", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-27875", - "dataset_size": "515.0MB", - "statistics": { 
- "total_episodes": 100, - "total_frames": 27875, - "total_tasks": 1, - "total_videos": 300, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "f4001f44-f88b-4812-a364-a7f937d58496", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the yellow bread", - "Place the bread into the yellow plate", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } }, "authors": { "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan 
Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_plate_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n 
└── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_plate_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, 
Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "leju_robot_box_storage_parcel_h": { - "path": "leju_robot_box_storage_parcel_h", - "dataset_name": "box_storage_parcel_h", + "AgiBot-g1_box_storage_cardboard_box_c": { + "path": "AgiBot-g1_box_storage_cardboard_box_c", + "dataset_name": "box_storage_cardboard_box_c", "robot_type": "", "end_effector_type": [ - "five_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ - "grasp", + "place", "pick", - "place" + "grasp" ], - "tasks": "Place the package into the parcel locker.", + "tasks": "Place the mouse and the power cord paper box into the container.", "objects": [ { "object_name": "table", @@ -118203,43 +127998,27 @@ "level5": null }, { - "object_name": "box", - "level1": 
"home_storage", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "parcel", + "object_name": "carton", "level1": "container", - "level2": "parcel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "conveyor_belt", - "level1": "industrial_equipment", - "level2": "conveyor_belt", + "level2": "carton", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-149279", - "dataset_size": "7.7GB", + "frame_range": "0-230771", + "dataset_size": "109.0GB", "statistics": { - "total_episodes": 389, - "total_frames": 149279, + "total_episodes": 476, + "total_frames": 230771, "total_tasks": 1, - "total_videos": 1167, + "total_videos": 3808, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "6dd99999-811b-4ae1-aac2-7a5b81087eaa", + "dataset_uuid": "f5991bf3-3900-460c-adbd-56e87ac4bbc1", "language": [ "en", "zh" @@ -118248,10 +128027,8 @@ "robotics" ], "sub_tasks": [ - "Place the package into the parcel locker.", - "Pick up the package from the inbound machine.", - "Pick up the package from the conveyor belt.", - "Place the package onto the inbound machine.", + "Place the mouse and the power cord paper box into the container.", + "Pick up the mouse and the power cord paper box.", "null" ], "annotations": { @@ -118289,22 +128066,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, 
Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "leju_robot_box_storage_parcel_h_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "leju_robot_box_storage_parcel_h_qced_hardlink/\n├── annotations/\n│ ├── 
eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_box_storage_cardboard_box_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_box_storage_cardboard_box_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "G1edu-u3_pick_cup_a": { - "path": "G1edu-u3_pick_cup_a", - "dataset_name": "pick_cup_a", + "Cobot_Magic_movethe_position_of_the_bluetooth": { + "path": "Cobot_Magic_movethe_position_of_the_bluetooth", + "dataset_name": "movethe_position_of_the_bluetooth", "robot_type": "", "end_effector_type": [ - "three_finger_hand" + "two_finger_gripper" ], "scene_type": [], "atomic_actions": [ "grasp", - "pick" + "pick", + "place" ], - "tasks": "End", + "tasks": "Place bluetooth earbud on the right side of the 
table", "objects": [ { "object_name": "table", @@ -118315,27 +128093,35 @@ "level5": null }, { - "object_name": "cup", - "level1": "kitchen_supplies", - "level2": "cup", + "object_name": "bluetooth_earbud", + "level1": "electronic_products", + "level2": "bluetooth_earbud", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bluetooth_earphone_case", + "level1": "electronic_products", + "level2": "bluetooth_earphone_case", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-8046", - "dataset_size": "88.4MB", + "operation_platform_height": 77.2, + "frame_range": "0-52508", + "dataset_size": "850.5MB", "statistics": { - "total_episodes": 24, - "total_frames": 8046, - "total_tasks": 1, - "total_videos": 24, + "total_episodes": 198, + "total_frames": 52508, + "total_tasks": 2, + "total_videos": 594, "total_chunks": 1, - "chunks_size": 24, - "fps": 30 + "chunks_size": 1000, + "fps": 50 }, - "dataset_uuid": "6c4e6e81-bded-43dc-a803-02147d6eb107", + "dataset_uuid": "dc390f5c-b242-48d0-86f0-0932b22a8f6a", "language": [ "en", "zh" @@ -118344,9 +128130,10 @@ "robotics" ], "sub_tasks": [ - "End", - "Grasp the paper cup and lift it to the center of the view with right gripper", - "Grasp the paper cup and lift it to the center of the view with left gripper", + "Place bluetooth earbud on the right side of the table", + "Place bluetooth earphone case on the left side of the table", + "Grasp the Bluetooth earphone case", + "Grasp the Bluetooth earbud", "null" ], "annotations": { @@ -118384,10 +128171,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, 
Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "G1edu-u3_pick_cup_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_pick_cup_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_movethe_position_of_the_bluetooth_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_movethe_position_of_the_bluetooth_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ 
├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth": { + "Airbot_MMK2_open_laptop": { "task_categories": [ "robotics" ], @@ -118417,10 +128204,10 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth", + "dataset_name": "Airbot_MMK2_open_laptop", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office & workspace", + "level1": "office_workspace", "level2": "office", "level3": null, "level4": null, @@ -118429,125 +128216,74 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "deli_water-based_marker", - "level1": "stationery", - "level2": "deli_water-based_marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "notebook", - "level1": "stationery", - "level2": "notebook", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mouse", - "level1": "appliances", - "level2": "mouse", - 
"level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mouse_pad", + "object_name": "laptop", "level1": "appliances", - "level2": "mouse_pad", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "green_table_cloths", - "level1": "laboratory_supplies", - "level2": "green_table_cloths", + "level2": "laptop", "level3": null, "level4": null, - "level5": null + "level5:operation_platform_height": 77.2 } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the left gripper organize the pen on the notebook then the right gripper organize the mouse on the mouse pad." + "press the laptop with your right hand and then open it with your left hand." ], "sub_tasks": [ { - "subtask": "Place the mouse on the mouse pad with the left gripper", + "subtask": "Press the laptop with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the marker with the left gripper", + "subtask": "Open the laptop with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the mouse with the right gripper", + "subtask": "Static", "subtask_index": 2 }, { - "subtask": "Place the mouse on the mouse pad with the right gripper", + "subtask": "Release the laptop with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the marker on the notebook with the right gripper", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Grasp the marker with the right gripper", + "subtask": "Grasp the laptop with the left gripper", "subtask_index": 5 }, - { - "subtask": "Grasp the mouse with the left gripper", - "subtask_index": 6 - }, - { - "subtask": "end", - "subtask_index": 7 - }, - { - "subtask": "Place the marker on the notebook with the left gripper", - "subtask_index": 8 - }, { "subtask": "null", - "subtask_index": 9 + "subtask_index": 6 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "open" ], "robot_name": [ - 
"Agilex_Cobot_Magic" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "radian", - "end_translation_dim": "meter", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -118558,23 +128294,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 30, - "total_frames": 19527, + "total_episodes": 50, + "total_frames": 11734, "fps": 30, - "total_tasks": 10, - "total_videos": 90, + "total_tasks": 7, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 26, - "action_dim": 26, - "camera_views": 3, - "dataset_size": "316.01 MB" + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "432.71 MB" }, - "frame_num": 19527, - "dataset_size": "316.01 MB", - "data_structure": "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (18 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 11734, + "dataset_size": "432.71 MB", + "data_structure": "Airbot_MMK2_open_laptop_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:29" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -118646,10 +128382,33 @@ "has_audio": false } }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, "observation.state": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -118658,32 +128417,42 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + 
"right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 26 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -118692,26 +128461,36 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", - "left_gripper_open", - "left_eef_pos_x_m", - "left_eef_pos_y_m", - "left_eef_pos_z_m", - "left_eef_rot_euler_x_rad", - "left_eef_rot_euler_y_rad", - "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "right_gripper_open", - "right_eef_pos_x_m", - "right_eef_pos_y_m", - "right_eef_pos_z_m", - "right_eef_rot_euler_x_rad", - "right_eef_rot_euler_y_rad", - "right_eef_rot_euler_z_rad" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -118751,17 +128530,17 @@ }, "subtask_annotation": { "names": null, - "dtype": "int32", "shape": [ 5 - ] + ], + "dtype": "int32" }, "scene_annotation": { "names": null, - "dtype": "int32", "shape": [ 1 - ] + ], + "dtype": "int32" }, "eef_sim_pose_state": { "names": [ @@ -118778,10 +128557,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + 
], + "dtype": "float32" }, "eef_sim_pose_action": { "names": [ @@ -118798,130 +128577,70 @@ "right_eef_rot_y", "right_eef_rot_z" ], - "dtype": "float32", "shape": [ 12 - ] + ], + "dtype": "float32" }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", "shape": [ 2 - ] + ], + "dtype": "int32" }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "dtype": "int32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", "shape": [ 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" ], - "dtype": "float32", - "shape": [ - 2 - ] + "dtype": "int32" } }, "authors": { @@ -118943,112 +128662,9 @@ 
"version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" - }, - "AgiBot-g1_box_storage_cardboard_box_b": { - "path": "AgiBot-g1_box_storage_cardboard_box_b", - "dataset_name": "box_storage_cardboard_box_b", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Place the mouse and the power cord paper box into the container.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "paper_box", - "level1": "tool", - "level2": "paper_box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "carton", - "level1": "tool", - "level2": "carton", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-115855", - "dataset_size": "56.9GB", - "statistics": { - "total_episodes": 247, - "total_frames": 115855, - "total_tasks": 1, - "total_videos": 1976, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "7e28f5b3-7a48-4d52-b01e-390390932ffd", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the mouse and the power cord paper box into the container.", - "Pick up the mouse and the power cord paper box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_cardboard_box_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"AgiBot-g1_box_storage_cardboard_box_b_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_change_baai_into_brain": { + "Airbot_MMK2_move_block_twice": { "task_categories": [ "robotics" ], @@ -119078,11 +128694,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_change_baai_into_brain", - "dataset_uuid": "75dad2ee-672e-402a-823a-198e5e42af62", + "dataset_name": "Airbot_MMK2_move_block_twice", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "eduction", - "level2": "school", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -119090,9 +128706,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "buiding_blocks", + "object_name": "rubik's_cube", "level1": "toys", - "level2": "buiding_blocks", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null @@ -119100,28 +128724,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "rearrange the word baai as brain." + "place the building blocks on the Rubik's Cube with your left hand and take them down with your right hand." 
], "sub_tasks": [ { - "subtask": "Disassemble the second character A", + "subtask": "Grasp the yellow build block with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the character R between first character B and third character A", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place the yellow build block on the Rubik's Cube with the left gripper", "subtask_index": 2 }, { - "subtask": "Place the character N after character I", + "subtask": "Place the yellow build block on the table with the right gripper", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "Grasp the yellow build block with the right gripper", "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], "atomic_actions": [ @@ -119130,21 +128758,21 @@ "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + 
"cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -119161,30 +128789,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 67, - "total_frames": 83804, + "total_episodes": 57, + "total_frames": 14643, "fps": 30, - "total_tasks": 5, - "total_videos": 268, + "total_tasks": 6, + "total_videos": 228, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "3.61 GB" + "dataset_size": "666.57 MB" }, - "frame_num": 83804, - "dataset_size": "3.61 GB", - "data_structure": "Galaxea_R1_Lite_change_baai_into_brain_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(55 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 14643, + "dataset_size": "666.57 MB", + "data_structure": "Airbot_MMK2_move_block_twice_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(45 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:66" + "train": "0:56" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -119193,8 +128821,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -119203,11 +128831,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -119216,8 +128844,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -119226,11 +128854,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -119239,8 +128867,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -119249,11 +128877,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -119262,8 +128890,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", 
"video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -119275,7 +128903,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -119290,14 +128918,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -119312,8 +128962,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -119464,66 +129136,6 @@ 2 ], "dtype": "int32" - }, - 
"gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -119545,11 +129157,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_put_the_tableware_into_the_cupboard": { - "path": "R1_Lite_put_the_tableware_into_the_cupboard", - "dataset_name": "put_the_tableware_into_the_cupboard", + "R1_Lite_tableware_cleaning": { + "path": "R1_Lite_tableware_cleaning", + "dataset_name": "tableware_cleaning", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -119560,12 +129172,12 @@ "pick", "place" ], - "tasks": "Place the fork in the cutlery box", + "tasks": "End", "objects": [ { - "object_name": "cabinet", + "object_name": "table", "level1": "furniture", - "level2": "cabinet", + "level2": "table", "level3": null, "level4": null, "level5": null @@ -119595,35 +129207,43 @@ "level5": null }, { - "object_name": "fork", - 
"level1": "tableware", - "level2": "fork", + "object_name": "plate", + "level1": "container", + "level2": "plate", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "dishwashing_liquid", + "level1": "cleaning_supplies", + "level2": "dishwashing_liquid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "sponge", + "level1": "cleaning_supplies", + "level2": "sponge", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": null, - "frame_range": "0-367607", - "dataset_size": "15.5GB", + "frame_range": "0-327254", + "dataset_size": "17.6GB", "statistics": { - "total_episodes": 143, - "total_frames": 367607, + "total_episodes": 102, + "total_frames": 327254, "total_tasks": 1, - "total_videos": 429, + "total_videos": 306, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "49bc9e7b-4491-4846-95a2-082b0e4f9178", + "dataset_uuid": "1969fa83-52f1-4c9f-94c6-6f9802241318", "language": [ "en", "zh" @@ -119632,33 +129252,30 @@ "robotics" ], "sub_tasks": [ - "Place the fork in the cutlery box", - "Place the spoon on the plate", - "Take the bowl out of the cupboard", - "Pick up the bowl and place it in the cupboard", - "Take the chopsticks out of the cupboard", - "Pull apart the shelf", - "Pick up the chopsticks and place it in the cupboard", "End", - "Take the plate out of the cupboard", - "Place the bowl on the shelf", - "Take the fork out of the cupboard", - "Abnormal", + "Rinse the chopsticks", + "Place the dish soap on the table", + "Wipe the plate", + "Rinse the plate", + "Place the chopsticks on the glass basin", + "Pick up a bowl and the sponge", + "Rinse the bowl", + "Wash the plate", + "Place the plate on the plate", "Place the plate on the table", - "Pick up the plate and place it in the cupboard", - "Place a chopstick on the table", - "Place the plate on the shelf", - "Pull apart the cupboard", - "Place 
the fork on the table", - "Place a pair of chopsticks in the cutlery box", - "Take the spoon out of the cupboard", + "Place the spoon in the glass basin", + "Place the bowl on the table", + "Wipe the bowl", + "Squeeze it onto the sponge", + "Abnormal", + "Rinse the spoon", "Place the bowl on the plate", - "Place the spoon on the table", - "Place the spoon on the cutlery box", - "Open the cupboard", - "Pick up the spoon and place it in the cupboard", - "Close the cupboard", - "Pick up the fork and place it in the cupboard", + "Put down the sponge", + "Place the bowl on the bowl", + "Turn off the faucet", + "Grab the dish soap", + "Wash the bowl", + "Turn on the faucet", "null" ], "annotations": { @@ -119696,24 +129313,23 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_put_the_tableware_into_the_cupboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_put_the_tableware_into_the_cupboard_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n 
│ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "R1_Lite_tableware_cleaning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_tableware_cleaning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── 
episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Cobot_Magic_fold_the_towel": { - "path": "Cobot_Magic_fold_the_towel", - "dataset_name": "fold_the_towel", + "AIRBOT_MMK2_boxs_storage": { + "path": "AIRBOT_MMK2_boxs_storage", + "dataset_name": "boxs_storage", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "five_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "place", "pick", - "fold" + "place" ], - "tasks": "End", + "tasks": "Grasp the potato with the left gripper", "objects": [ { "object_name": "table", @@ -119724,35 +129340,83 @@ "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "calculator_box", + "level1": "container", + "level2": "calculator_box", "level3": null, "level4": null, "level5": null }, { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", + "object_name": "power_bank_box", + "level1": "container", + "level2": "power_bank_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bucket", + "level1": "container", + "level2": "bucket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pomegranate", + "level1": "fruit", + "level2": 
"pomegranate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "fruit", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato", + "level1": "vegetable", + "level2": "potato", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pumpkin", + "level1": "vegetable", + "level2": "pumpkin", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compartment", + "level1": "container", + "level2": "compartment", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-235124", - "dataset_size": "5.1GB", + "frame_range": "0-40906", + "dataset_size": "1.2GB", "statistics": { - "total_episodes": 177, - "total_frames": 235124, + "total_episodes": 138, + "total_frames": 40906, "total_tasks": 3, - "total_videos": 531, + "total_videos": 552, "total_chunks": 1, "chunks_size": 1000, - "fps": 50 + "fps": 30 }, - "dataset_uuid": "15a5b235-0337-4484-9e0f-be8def7a8879", + "dataset_uuid": "fda26243-c5f4-4e3a-8a7e-84a7d7c71f3a", "language": [ "en", "zh" @@ -119761,14 +129425,20 @@ "robotics" ], "sub_tasks": [ + "Grasp the potato with the left gripper", + "Place the phone case box in the yellow box with the right gripper", + "Place the pumpkin into the right compartment of the storage box with the right gripper", + "Place the calculator box in the yellow box with the left gripper", + "Static", + "Grasp the calculator box with the left gripper", "End", - "Fold the towel upwards with the both gripper", - "Fold the towel upwards with the right gripper", - "Tidy up the towels", - "Fold the towel from right to left with the right gripper", - "Fold the towel upwards with the left gripper", - "Fold the towel", - "abnormal", + "Grasp the mango with the right gripper", + "Grasp the pomegranate with the left gripper", + "Place the pomegranate into the left compartment of the storage box with the 
left gripper", + "Place the mango into the right compartment of the storage box with the right gripper", + "Grasp the phone case box with the right gripper", + "Place the potato into the left compartment of the storage box with the left gripper", + "Grasp the pumpkin with the right gripper", "null" ], "annotations": { @@ -119806,10 +129476,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Cobot_Magic_fold_the_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_fold_the_towel_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_boxs_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_boxs_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_cake_both_hands": { + "Airbot_MMK2_storage_bell_pepper_bowl": { "task_categories": [ "robotics" ], @@ -119839,11 +129509,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_cake_both_hands", + "dataset_name": "Airbot_MMK2_storage_bell_pepper_bowl", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "commercial_convenience", - "level2": "supermarket", + "level1": "living_room", + "level2": "household", "level3": null, "level4": null, "level5": null @@ -119851,25 +129521,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "home_storage", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cakes", - "level1": "bread", - "level2": "cakes", + "object_name": "yellow_bell_pepper", + "level1": "vegetables", + "level2": "yellow_bell_pepper", "level3": null, "level4": null, "level5": null }, { - "object_name": "lid", - "level1": "laboratory_supplies", - "level2": "lid", + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", "level3": null, "level4": null, "level5": null 
@@ -119877,36 +129539,32 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put two cakes in the lid." + "put the yellow green peppers into the bowl with right hand." ], "sub_tasks": [ { - "subtask": "Grasp the cake with the left gripper", + "subtask": "Abnormal", "subtask_index": 0 }, { - "subtask": "Place the cake on the white basket with the right gripper", + "subtask": "Place yellow round chili pepper on the blue bowl with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the cake with the right gripper", + "subtask": "Grasp the yellow round chili pepper with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the cake on the white basket with the left gripper", + "subtask": "End", "subtask_index": 3 }, { "subtask": "Static", "subtask_index": 4 }, - { - "subtask": "End", - "subtask_index": 5 - }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 5 } ], "atomic_actions": [ @@ -119946,23 +129604,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 4445, + "total_episodes": 50, + "total_frames": 4495, "fps": 30, - "total_tasks": 7, - "total_videos": 188, + "total_tasks": 6, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "188.05 MB" + "dataset_size": "149.48 MB" }, - "frame_num": 4445, - "dataset_size": "188.05 MB", - "data_structure": "Airbot_MMK2_storage_cake_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- 
episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 4495, + "dataset_size": "149.48 MB", + "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_bell_pepper_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:46" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -120316,9 +129974,116 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_move_the_position_of_the_black_marker": { - "path": "R1_Lite_move_the_position_of_the_black_marker", - "dataset_name": "move_the_position_of_the_black_marker", + "AIRBOT_MMK2_place_the_blue_and_purple_blocks": { + "path": "AIRBOT_MMK2_place_the_blue_and_purple_blocks", + "dataset_name": "place_the_blue_and_purple_blocks", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the purple cube block with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square", + "level1": "toy", + "level2": "square", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "container", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-10277", + "dataset_size": "502.1MB", + "statistics": { + "total_episodes": 50, + "total_frames": 10277, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "515b60a8-59ef-485b-bd22-34f963fd853c", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": 
[ + "Grasp the purple cube block with the left gripper", + "Grasp the blue cube block with the right gripper", + "Place the purple cube block on the plate with the left gripper", + "Static", + "Place the blue cube block on the purple cube block with the right gripper", + "End", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao 
Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "AIRBOT_MMK2_place_the_blue_and_purple_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_the_blue_and_purple_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "R1_Lite_move_the_position_of_the_triangle_bread": { + "path": "R1_Lite_move_the_position_of_the_triangle_bread", + "dataset_name": "move_the_position_of_the_triangle_bread", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -120329,7 +130094,7 @@ "pick", "grasp" ], - "tasks": "Grasp the Marker pen with left gripper", + "tasks": "Grasp the pizza with right gripper", "objects": [ { "object_name": "table", @@ -120339,6 +130104,14 @@ "level4": null, "level5": null }, + { + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, { "object_name": "pen", "level1": "office_supplies", @@ -120674,21 +130447,29 @@ "level3": null, "level4": null, "level5": null + }, + { + "object_name": "triangle_bread", + 
"level1": "food", + "level2": "triangle_bread", + "level3": null, + "level4": null, + "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-15618", - "dataset_size": "536.6MB", + "frame_range": "0-8540", + "dataset_size": "300.1MB", "statistics": { - "total_episodes": 61, - "total_frames": 15618, + "total_episodes": 40, + "total_frames": 8540, "total_tasks": 1, - "total_videos": 244, + "total_videos": 160, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "5d591692-f2ae-4fa8-97d1-9cd1613b1b33", + "dataset_uuid": "f60ef2d6-fc04-4276-b75e-e8b54ab7602c", "language": [ "en", "zh" @@ -120697,12 +130478,242 @@ "robotics" ], "sub_tasks": [ - "Grasp the Marker pen with left gripper", - "Grasp the Marker pen with right gripper", - "Place the Marker pen on the table with right gripper", + "Grasp the pizza with right gripper", "Static", + "Place the pizza on the table with right gripper", "End", - "Place the Marker pen on the table with left gripper", + "Grasp the pizza with left gripper", + "Place the pizza on the table with left gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual 
Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_move_the_position_of_the_triangle_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_move_the_position_of_the_triangle_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── 
episode_000004.mp4\n └── (...)" + }, + "R1_Lite_plug_the_socket": { + "path": "R1_Lite_plug_the_socket", + "dataset_name": "plug_the_socket", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Hold the socket with your right hand and unplug it with your left hand", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mosquito_repellent_liquid", + "level1": "daily_necessities", + "level2": "mosquito_repellent_liquid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plug", + "level1": "electric_appliance", + "level2": "plug", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "socket", + "level1": "electric_appliance", + "level2": "socket", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-115158", + "dataset_size": "5.1GB", + "statistics": { + "total_episodes": 96, + "total_frames": 115158, + "total_tasks": 1, + "total_videos": 288, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "a456ff6a-8652-485b-a545-1bbcee60a201", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Hold the socket with your right hand and unplug it with your left hand", + "abnormal", + "Pick up the mosquito repellent liquid", + "Place the mosquito repellent liquid on the table", + "Insert the plug into the socket", + "Hold the socket with your left hand and unplug it with your right hand", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": 
"auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_plug_the_socket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_plug_the_socket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "AIRBOT_MMK2_pour_out_the_beauty_blender": { + "path": "AIRBOT_MMK2_pour_out_the_beauty_blender", + "dataset_name": "pour_out_the_beauty_blender", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "place", + "pick" + ], + "tasks": "Place the paper cup on the table with right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "marble", + "level1": "toy", + "level2": "marble", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "container", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "container", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-17577", + "dataset_size": "652.5MB", + "statistics": { + "total_episodes": 47, + "total_frames": 17577, + "total_tasks": 1, + "total_videos": 188, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "9bd6999e-de56-4b21-b103-e3054f98d4ef", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the paper cup on the table with right gripper", + "Pour the bullets from the paper cup into the bowl with right gripper", + "Static", + "Grasp the paper cup containing bullets with right gripper", + "End", + "Abnormal", "null" ], "annotations": { @@ -120740,10 +130751,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, 
Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_black_marker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_black_marker_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": 
"AIRBOT_MMK2_pour_out_the_beauty_blender_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_pour_out_the_beauty_blender_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_move_cup_paper_box": { + "Airbot_MMK2_move_block_wet_wipes": { "task_categories": [ "robotics" ], @@ -120773,11 +130784,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_cup_paper_box", + "dataset_name": "Airbot_MMK2_move_block_wet_wipes", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "kitchen", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -120785,25 +130796,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "paper_boxes", - "level1": "home_storage", - "level2": "paper_boxes", + "object_name": "white_tray", + "level1": "kitchen_supplies", + "level2": "white_tray", "level3": null, "level4": null, "level5": null }, { - "object_name": "pink_plate", - "level1": "kitchen_supplies", - "level2": "pink_plate", + "object_name": "arch_building_blocks", + "level1": "toys", + "level2": "arch_building_blocks", "level3": null, "level4": null, "level5": null }, { - "object_name": "coffee cup", + "object_name": "wet_tissue_paper", "level1": "kitchen_supplies", - "level2": "coffee cup", + "level2": "wet_tissue_paper", 
"level3": null, "level4": null, "level5": null @@ -120811,31 +130822,31 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place the paper box on the paper box with your left hand and put the coffee cup on the plate with your right hand." + "put the wet wipes and building blocks into the white tray respectively with your left and right hands." ], "sub_tasks": [ { - "subtask": "Grasp the mouse box with the left gripper", + "subtask": "Place the arched build blocks on the white board with the right gripper", "subtask_index": 0 }, { - "subtask": "Place the mouse box on the calculator box with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Grasp the wet wipes with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the coffee cup with the right gripper", + "subtask": "Grasp the arched build blocks with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the coffee cup on the white plate with the right gripper", + "subtask": "Place the wet wipes on the white board with the left gripper", "subtask_index": 4 }, { - "subtask": "Place the coffee cup on the pink plate with the right gripper", + "subtask": "Abnormal", "subtask_index": 5 }, { @@ -120880,23 +130891,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 93, - "total_frames": 25588, + "total_episodes": 60, + "total_frames": 15271, "fps": 30, "total_tasks": 7, - "total_videos": 372, + "total_videos": 240, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "929.19 MB" + "dataset_size": "479.18 MB" }, - "frame_num": 25588, - "dataset_size": "929.19 MB", - "data_structure": "Airbot_MMK2_move_cup_paper_box_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 15271, + "dataset_size": "479.18 MB", + "data_structure": "Airbot_MMK2_move_block_wet_wipes_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(48 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:92" + "train": "0:59" }, "features": { "observation.images.cam_head_rgb": { @@ -121250,207 +131261,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AgiBot-g1_box_storage_cardboard_box_c": { - "path": "AgiBot-g1_box_storage_cardboard_box_c", - "dataset_name": "box_storage_cardboard_box_c", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Place the mouse and the power cord paper box into the container.", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "carton", - "level1": "container", - "level2": "carton", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-230771", - "dataset_size": "109.0GB", - "statistics": { - "total_episodes": 476, - "total_frames": 230771, - "total_tasks": 1, - "total_videos": 3808, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "f5991bf3-3900-460c-adbd-56e87ac4bbc1", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the mouse and the power cord paper box into the container.", - "Pick up the mouse and the power cord paper box.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": 
"auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - 
"depth_enabled": false, - "data_schema": "AgiBot-g1_box_storage_cardboard_box_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_box_storage_cardboard_box_c_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Cobot_Magic_movethe_position_of_the_bluetooth": { - "path": "Cobot_Magic_movethe_position_of_the_bluetooth", - "dataset_name": "movethe_position_of_the_bluetooth", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Place bluetooth earbud on the right side of the table", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bluetooth_earbud", - "level1": "electronic_products", - "level2": "bluetooth_earbud", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bluetooth_earphone_case", - "level1": "electronic_products", - "level2": "bluetooth_earphone_case", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-52508", - "dataset_size": "850.5MB", - "statistics": { - "total_episodes": 198, - "total_frames": 52508, - "total_tasks": 2, - "total_videos": 594, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "dc390f5c-b242-48d0-86f0-0932b22a8f6a", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place bluetooth earbud on the right side of the table", - "Place bluetooth earphone case on the left side of the table", - "Grasp the Bluetooth earphone case", - "Grasp the Bluetooth earbud", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - 
"scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_movethe_position_of_the_bluetooth_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_movethe_position_of_the_bluetooth_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_open_laptop": { + "Airbot_MMK2_open_door_left": { "task_categories": [ "robotics" ], @@ -121480,11 +131291,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_open_laptop", + "dataset_name": "Airbot_MMK2_open_door_left", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "scene_level1", + "level2": "scene_level2", "level3": null, "level4": null, "level5": null @@ -121492,50 +131303,37 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "laptop", - "level1": "appliances", - "level2": "laptop", + "object_name": "cabinet", + "level1": "furniture", + "level2": "cabinet", "level3": null, "level4": null, - "level5:operation_platform_height": 77.2 + "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "press the laptop with your right hand and then open it with your left hand." + "open the cabinet." 
], "sub_tasks": [ { - "subtask": "Press the laptop with the right gripper", + "subtask": "Touch the door with the left gripper", "subtask_index": 0 }, { - "subtask": "Open the laptop with the left gripper", + "subtask": "Open the door with the left gripper", "subtask_index": 1 }, - { - "subtask": "Static", - "subtask_index": 2 - }, - { - "subtask": "Release the laptop with the right gripper", - "subtask_index": 3 - }, { "subtask": "End", - "subtask_index": 4 - }, - { - "subtask": "Grasp the laptop with the left gripper", - "subtask_index": 5 + "subtask_index": 2 }, { "subtask": "null", - "subtask_index": 6 + "subtask_index": 3 } ], "atomic_actions": [ - "grasp", "open" ], "robot_name": [ @@ -121570,23 +131368,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 11734, + "total_episodes": 49, + "total_frames": 6418, "fps": 30, - "total_tasks": 7, - "total_videos": 200, + "total_tasks": 4, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "432.71 MB" + "dataset_size": "188.62 MB" }, - "frame_num": 11734, - "dataset_size": "432.71 MB", - "data_structure": "Airbot_MMK2_open_laptop_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 6418, + "dataset_size": "188.62 MB", + "data_structure": "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:49" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -121940,9 +131738,9 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_tableware_cleaning": { - "path": "R1_Lite_tableware_cleaning", - "dataset_name": "tableware_cleaning", + "Cobot_Magic_cut_banana": { + "path": "Cobot_Magic_cut_banana", + "dataset_name": "cut_banana", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -121951,9 +131749,10 @@ "atomic_actions": [ "grasp", "pick", + "cut", "place" ], - "tasks": "End", + "tasks": "Cut the banana with your left hand.", "objects": [ { "object_name": "table", @@ -121964,67 +131763,59 @@ "level5": null }, { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chopsticks", - "level1": "tableware", - "level2": "chopsticks", + "object_name": "banana", + "level1": "fruit", + "level2": "banana", "level3": null, "level4": null, "level5": null }, { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "plastic_vegetable_board", + "level1": "tool", + "level2": "plastic_vegetable_board", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "container", - "level2": "plate", + "object_name": "knife_holder", + "level1": "furniture", + "level2": "knife_holder", "level3": null, "level4": null, "level5": null }, { - "object_name": "dishwashing_liquid", - "level1": "cleaning_supplies", - "level2": 
"dishwashing_liquid", + "object_name": "knife", + "level1": "tool", + "level2": "knife", "level3": null, "level4": null, "level5": null }, { - "object_name": "sponge", - "level1": "cleaning_supplies", - "level2": "sponge", + "object_name": "fake_banana", + "level1": "toy", + "level2": "fake_banana", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": null, - "frame_range": "0-327254", - "dataset_size": "17.6GB", + "operation_platform_height": 77.2, + "frame_range": "0-310367", + "dataset_size": "4.2GB", "statistics": { - "total_episodes": 102, - "total_frames": 327254, - "total_tasks": 1, - "total_videos": 306, + "total_episodes": 583, + "total_frames": 310367, + "total_tasks": 2, + "total_videos": 1749, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "1969fa83-52f1-4c9f-94c6-6f9802241318", + "dataset_uuid": "3637d17f-4a5a-4801-902f-3e5aee09f48b", "language": [ "en", "zh" @@ -122033,30 +131824,15 @@ "robotics" ], "sub_tasks": [ - "End", - "Rinse the chopsticks", - "Place the dish soap on the table", - "Wipe the plate", - "Rinse the plate", - "Place the chopsticks on the glass basin", - "Pick up a bowl and the sponge", - "Rinse the bowl", - "Wash the plate", - "Place the plate on the plate", - "Place the plate on the table", - "Place the spoon in the glass basin", - "Place the bowl on the table", - "Wipe the bowl", - "Squeeze it onto the sponge", - "Abnormal", - "Rinse the spoon", - "Place the bowl on the plate", - "Put down the sponge", - "Place the bowl on the bowl", - "Turn off the faucet", - "Grab the dish soap", - "Wash the bowl", - "Turn on the faucet", + "Cut the banana with your left hand.", + "Cut the banana with your right hand.", + "Hold the blade with your left hand.", + "Discard the waste.", + "Secure the blade with your left hand.", + "Place the knife on the storage rack with your right hand.", + "Grab the knife handle with your right hand.", + "end", + "Lift the knife with your right hand.", "null" 
], "annotations": { @@ -122094,12 +131870,12 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_tableware_cleaning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_tableware_cleaning_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_cut_banana_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── 
(...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_cut_banana_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── 
episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "AIRBOT_MMK2_boxs_storage": { - "path": "AIRBOT_MMK2_boxs_storage", - "dataset_name": "boxs_storage", + "AIRBOT_MMK2_item_storage": { + "path": "AIRBOT_MMK2_item_storage", + "dataset_name": "item_storage", "robot_type": "", "end_effector_type": [ "five_finger_hand" @@ -122110,94 +131886,70 @@ "pick", "place" ], - "tasks": "Grasp the potato with the left gripper", + "tasks": "Grasp the umbrella with the left gripper", "objects": [ { "object_name": "table", "level1": "furniture", - "level2": "table", + "level2": "able", "level3": null, "level4": null, "level5": null }, { - "object_name": "calculator_box", + "object_name": "barrel", "level1": "container", - "level2": "calculator_box", + "level2": "barrel", "level3": null, "level4": null, "level5": null }, { - "object_name": "power_bank_box", - "level1": "container", - "level2": "power_bank_box", + "object_name": "umbrella", + "level1": "daily_necessities", + "level2": "umbrella", "level3": null, "level4": null, "level5": null }, { - "object_name": "bucket", + "object_name": "bottle", "level1": "container", - "level2": "bucket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pomegranate", - "level1": "fruit", - "level2": "pomegranate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mango", - "level1": "fruit", - "level2": "mango", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "potato", - "level1": "vegetable", - "level2": "potato", + "level2": "bottle", "level3": null, "level4": null, "level5": null }, { - "object_name": "pumpkin", - "level1": "vegetable", - "level2": "pumpkin", + "object_name": "lid", + "level1": "daily_necessities", + "level2": "lid", "level3": null, "level4": null, "level5": null }, { - "object_name": "compartment", + "object_name": "bowl", "level1": "container", - "level2": "compartment", + "level2": "bowl", "level3": null, "level4": null, 
"level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-40906", - "dataset_size": "1.2GB", + "frame_range": "0-18773", + "dataset_size": "717.3MB", "statistics": { - "total_episodes": 138, - "total_frames": 40906, - "total_tasks": 3, - "total_videos": 552, + "total_episodes": 72, + "total_frames": 18773, + "total_tasks": 2, + "total_videos": 288, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "fda26243-c5f4-4e3a-8a7e-84a7d7c71f3a", + "dataset_uuid": "cfed792e-cf1c-4d1f-b6fc-a022e9be4d5e", "language": [ "en", "zh" @@ -122206,20 +131958,16 @@ "robotics" ], "sub_tasks": [ - "Grasp the potato with the left gripper", - "Place the phone case box in the yellow box with the right gripper", - "Place the pumpkin into the right compartment of the storage box with the right gripper", - "Place the calculator box in the yellow box with the left gripper", - "Static", - "Grasp the calculator box with the left gripper", + "Grasp the umbrella with the left gripper", "End", - "Grasp the mango with the right gripper", - "Grasp the pomegranate with the left gripper", - "Place the pomegranate into the left compartment of the storage box with the left gripper", - "Place the mango into the right compartment of the storage box with the right gripper", - "Grasp the phone case box with the right gripper", - "Place the potato into the left compartment of the storage box with the left gripper", - "Grasp the pumpkin with the right gripper", + "Grasp the bowl with the right gripper", + "Grasp the bowl with the left gripper", + "Place the cup into the white basket with the right gripper", + "Static", + "Place the umbrella into the white basket with the left gripper", + "Place the bowl into the yellow basket with the left gripper", + "Place the bowl into the yellow basket with the right gripper", + "Grasp the cup with the right gripper", "null" ], "annotations": { @@ -122257,10 +132005,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An 
Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_boxs_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ 
├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_boxs_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": 
"AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n 
│ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_bell_pepper_bowl": { + "Galaxea_R1_Lite_mix_color_large_test_tube": { "task_categories": [ "robotics" ], @@ -122290,11 +132038,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_bell_pepper_bowl", + "dataset_name": "Galaxea_R1_Lite_mix_color_large_test_tube", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "living_room", - "level2": "household", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -122302,17 +132050,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "yellow_bell_pepper", - "level1": "vegetables", - "level2": "yellow_bell_pepper", + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + 
"level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "large_test_tubes", + "level1": "laboratory_supplies", + "level2": "large_test_tubes", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", - "level1": "kitchen_supplies", - "level2": "bowl", + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null @@ -122320,55 +132100,80 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the yellow green peppers into the bowl with right hand." + "pick up a test tube with pigment on left test tube rack and a test tube with pigment on right test tube rack by grippers and pour them into the beaker." ], "sub_tasks": [ { - "subtask": "Abnormal", + "subtask": "Place the test tube into the paper cup with the left gripper", "subtask_index": 0 }, { - "subtask": "Place yellow round chili pepper on the blue bowl with the right gripper", + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the yellow round chili pepper with the right gripper", + "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", "subtask_index": 3 }, { - "subtask": "Static", + "subtask": "Grasp the blue reagent with the left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 5 + }, + { + "subtask": "Place the test tube into the paper cup with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the yellow reagent with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", + "subtask_index": 8 + 
}, + { + "subtask": "Grasp the red reagent with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Grasp the red reagent with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "null", + "subtask_index": 11 } ], "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -122385,30 +132190,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 4495, + "total_episodes": 121, + "total_frames": 131656, "fps": 30, - "total_tasks": 6, - "total_videos": 200, + "total_tasks": 12, + "total_videos": 484, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - 
"action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "149.48 MB" + "dataset_size": "4.70 GB" }, - "frame_num": 4495, - "dataset_size": "149.48 MB", - "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_bell_pepper_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 131656, + "dataset_size": "4.70 GB", + "data_structure": "Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- 
episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (109 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:120" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -122417,8 +132222,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -122427,11 +132232,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -122440,8 +132245,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -122450,11 +132255,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -122463,8 +132268,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -122473,11 +132278,11 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -122486,8 +132291,8 @@ "channels" ], "info": { - 
"video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -122499,7 +132304,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -122514,36 +132319,14 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -122558,30 +132341,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + 
"right_gripper_open" ] }, "timestamp": { @@ -122732,810 +132493,90 @@ 2 ], "dtype": "int32" - } - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" - } - ] - }, - "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", - "support_info": "For technical support, please open an issue on our GitHub repository.", - "license_details": "apache-2.0", - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", - "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", - "version_info": "Initial Release", - "data_path": "data/chunk-{id}/episode_{id}.parquet", - "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "AIRBOT_MMK2_place_the_blue_and_purple_blocks": { - "path": "AIRBOT_MMK2_place_the_blue_and_purple_blocks", - "dataset_name": "place_the_blue_and_purple_blocks", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the purple cube block with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "square", - "level1": "toy", - "level2": "square", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-10277", - "dataset_size": "502.1MB", - "statistics": { - "total_episodes": 50, - "total_frames": 10277, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "515b60a8-59ef-485b-bd22-34f963fd853c", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the purple cube block with the left gripper", - "Grasp the blue cube block with the right gripper", - "Place the purple cube block on the plate with the left gripper", - "Static", - "Place the blue cube block on the purple cube block with the right gripper", - "End", - "null" - ], - "annotations": 
{ - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv 
preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_place_the_blue_and_purple_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_the_blue_and_purple_blocks_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── 
episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_move_the_position_of_the_triangle_bread": { - "path": "R1_Lite_move_the_position_of_the_triangle_bread", - "dataset_name": "move_the_position_of_the_triangle_bread", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "place", - "pick", - "grasp" - ], - "tasks": "Grasp the pizza with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "waffle", - "level1": "food", - "level2": "waffle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "pen", - "level1": "office_supplies", - "level2": "pen", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "banana", - "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bath_ball", - "level1": "daily_necessities", - "level2": "bath_ball", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "container", 
- "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "can", - "level1": "container", - "level2": "can", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "eraser", - "level1": "office_supplies", - "level2": "eraser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "hard_cleanser", - "level1": "daily_necessities", - "level2": "hard_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peeler", - "level1": "tool", - "level2": "peeler", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "block", - "level1": "toy", - "level2": "block", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "duck", - "level1": "toy", - "level2": "duck", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soap", - "level1": "daily_necessities", - "level2": "soap", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "basket", - "level1": "container", - "level2": "basket", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cola", - "level1": "drink", - "level2": "cola", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "detergent", - "level1": "daily_necessities", - "level2": "detergent", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_beater", - "level1": "electrical_appliances", - "level2": "egg_beater", - "level3": null, - "level4": null, - "level5": 
null - }, - { - "object_name": "towel", - "level1": "clothing", - "level2": "towel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "orange", - "level1": "fruit", - "level2": "orange", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "peach", - "level1": "fruit", - "level2": "peach", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "marker", - "level1": "office_supplies", - "level2": "marker", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "rubiks_cube", - "level1": "toy", - "level2": "rubiks_cube", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread_slice", - "level1": "food", - "level2": "bread_slice", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "brush", - "level1": "daily_necessities", - "level2": "brush", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yogurt", - "level1": "drink", - "level2": "yogurt", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "power_strip", - "level1": "electric_appliance", - "level2": "power_strip", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "milk", - "level1": "drink", - "level2": "milk", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soda", - "level1": "drink", - "level2": "soda", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lime", - "level1": "fruit", - "level2": "lime", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "coffee_capsule", - "level1": "drink", - "level2": "coffee_capsule", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "dish", - "level1": "container", - "level2": "dish", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glass", - "level1": "furniture", - "level2": "glass", - 
"level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "egg_yolk_pastry", - "level1": "food", - "level2": "egg_yolk_pastry", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "glasses_case", - "level1": "daily_necessities", - "level2": "glasses_case", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "gum", - "level1": "daily_necessities", - "level2": "gum", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "tape", - "level1": "daily_necessities", - "level2": "tape", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "soft_cleanser", - "level1": "daily_necessities", - "level2": "soft_cleanser", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chips", - "level1": "food", - "level2": "chips", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "chocolate", - "level1": "food", - "level2": "chocolate", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cookie", - "level1": "food", - "level2": "cookie", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "triangle_bread", - "level1": "food", - "level2": "triangle_bread", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-8540", - "dataset_size": "300.1MB", - "statistics": { - "total_episodes": 40, - "total_frames": 8540, - "total_tasks": 1, - "total_videos": 160, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "f60ef2d6-fc04-4276-b75e-e8b54ab7602c", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the pizza with right gripper", - "Static", - "Place the pizza on the table with right gripper", 
- "End", - "Grasp the pizza with left gripper", - "Place the pizza on the table with left gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_move_the_position_of_the_triangle_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_move_the_position_of_the_triangle_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_left_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "R1_Lite_plug_the_socket": { - "path": "R1_Lite_plug_the_socket", - "dataset_name": "plug_the_socket", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Hold the socket with your right hand and unplug it with your left hand", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null }, - { - "object_name": "mosquito_repellent_liquid", - "level1": "daily_necessities", - "level2": "mosquito_repellent_liquid", - "level3": null, - "level4": null, - "level5": null + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, - { - "object_name": "plug", - "level1": "electric_appliance", - "level2": "plug", - "level3": null, - "level4": null, - "level5": null + 
"gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" }, - { - "object_name": "socket", - "level1": "electric_appliance", - "level2": "socket", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-115158", - "dataset_size": "5.1GB", - "statistics": { - "total_episodes": 96, - "total_frames": 115158, - "total_tasks": 1, - "total_videos": 288, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "a456ff6a-8652-485b-a545-1bbcee60a201", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Hold the socket with your right hand and unplug it with your left hand", - "abnormal", - "Pick up the mosquito repellent liquid", - "Place the mosquito repellent liquid on the table", - "Insert the plug into the socket", - "Hold the socket with your left hand and unplug it with your right hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan 
Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_plug_the_socket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_plug_the_socket_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_pour_out_the_beauty_blender": { - "path": "AIRBOT_MMK2_pour_out_the_beauty_blender", - "dataset_name": "pour_out_the_beauty_blender", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Place the paper cup on the table with right gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null 
+ "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "marble", - "level1": "toy", - "level2": "marble", - "level3": null, - "level4": null, - "level5": null + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "cup", - "level1": "container", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" }, - { - "object_name": "bowl", - "level1": "container", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-17577", - "dataset_size": "652.5MB", - "statistics": { - "total_episodes": 47, - "total_frames": 17577, - "total_tasks": 1, - "total_videos": 188, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "9bd6999e-de56-4b21-b103-e3054f98d4ef", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Place the paper cup on the table with right gripper", - "Pour the bullets from the paper cup into the bowl with right gripper", - "Static", - "Grasp the paper cup containing bullets with right gripper", - "End", - "Abnormal", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + 
"right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" } ] }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", "homepage": "https://flagopen.github.io/RoboCOIN/", "paper": "https://arxiv.org/abs/2511.17441", "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": 
"AIRBOT_MMK2_pour_out_the_beauty_blender_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_pour_out_the_beauty_blender_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Airbot_MMK2_move_block_wet_wipes": { + "Airbot_MMK2_swap_bbs_cake_plate": { "task_categories": [ "robotics" ], @@ -123565,11 +132606,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_block_wet_wipes", + "dataset_name": "Airbot_MMK2_swap_bbs_cake_plate", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -123577,25 +132618,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "white_tray", - "level1": "kitchen_supplies", - "level2": "white_tray", + "object_name": "bb_pellets", + "level1": "toys", + "level2": "bb_pellets", "level3": null, "level4": null, "level5": null }, { - "object_name": "arch_building_blocks", - "level1": "toys", - "level2": "arch_building_blocks", + "object_name": "cake", + "level1": "bread", + "level2": "cake", "level3": null, "level4": null, "level5": null }, { - "object_name": "wet_tissue_paper", + "object_name": "plate", "level1": "kitchen_supplies", - "level2": "wet_tissue_paper", + "level2": "plate", "level3": null, "level4": null, "level5": null @@ -123603,36 +132644,40 @@ ], 
"task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the wet wipes and building blocks into the white tray respectively with your left and right hands." + "take the bbs out of the plate with left hand and put the cake in with right hand." ], "sub_tasks": [ { - "subtask": "Place the arched build blocks on the white board with the right gripper", + "subtask": "Grasp the cake on the table with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the bullet on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the wet wipes with the left gripper", + "subtask": "Grasp the bullet into the plate with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the arched build blocks with the right gripper", + "subtask": "Abnormal", "subtask_index": 3 }, { - "subtask": "Place the wet wipes on the white board with the left gripper", + "subtask": "Place the cake into the plate with the right gripper", "subtask_index": 4 }, { - "subtask": "Abnormal", + "subtask": "Static", "subtask_index": 5 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 } ], "atomic_actions": [ @@ -123672,23 +132717,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 60, - "total_frames": 15271, + "total_episodes": 49, + "total_frames": 7877, "fps": 30, - "total_tasks": 7, - "total_videos": 240, + "total_tasks": 8, + "total_videos": 196, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "479.18 MB" + "dataset_size": "384.19 MB" }, - "frame_num": 15271, - "dataset_size": "479.18 MB", - "data_structure": "Airbot_MMK2_move_block_wet_wipes_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (48 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 7877, + "dataset_size": "384.19 MB", + "data_structure": "Airbot_MMK2_swap_bbs_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:59" + "train": "0:48" }, "features": { "observation.images.cam_head_rgb": { @@ -124042,7 +133087,94 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_open_door_left": { + "R1_Lite_opening_and_closing_aalcony_sliding_doors": { + "path": "R1_Lite_opening_and_closing_aalcony_sliding_doors", + "dataset_name": "opening_and_closing_aalcony_sliding_doors", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "push", + "pull" + ], + "tasks": "Close the slid door", + "objects": [ + { + "object_name": "sliding_door", + "level1": "furniture", + "level2": "sliding_door", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": null, + "frame_range": "0-133637", + "dataset_size": "6.9GB", + "statistics": { + "total_episodes": 101, + "total_frames": 133637, + "total_tasks": 1, + "total_videos": 303, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "4d71497e-4f98-487e-8f46-e2a076016680", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Close the slid door", + "Open the slid door", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + 
"name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_opening_and_closing_aalcony_sliding_doors_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_opening_and_closing_aalcony_sliding_doors_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ 
└── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_storage_orange_basket_right": { "task_categories": [ "robotics" ], @@ -124072,11 +133204,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_open_door_left", + "dataset_name": "Agilex_Cobot_Magic_storage_orange_basket_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -124084,9 +133216,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "cabinet", - "level1": "furniture", - "level2": "cabinet", + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", "level3": null, "level4": null, "level5": null @@ -124094,51 +133242,55 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "open the cabinet." + "the right gripper grabs oranges and puts them into the basket." 
], "sub_tasks": [ { - "subtask": "Touch the door with the left gripper", + "subtask": "Place the orange in the basket with right gripper", "subtask_index": 0 }, { - "subtask": "Open the door with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "null", + "subtask": "Grasp the orange with right gripper", "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 } ], "atomic_actions": [ - "open" + "grasp", + "lift", + "lower" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -124149,23 +133301,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 6418, + "total_episodes": 98, + "total_frames": 22531, "fps": 30, - "total_tasks": 4, - 
"total_videos": 196, + "total_tasks": 5, + "total_videos": 294, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "188.62 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "233.17 MB" }, - "frame_num": 6418, - "dataset_size": "188.62 MB", - "data_structure": "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 22531, + "dataset_size": "233.17 MB", + "data_structure": "Agilex_Cobot_Magic_storage_orange_basket_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:48" + "train": "0:97" }, "features": { "observation.images.cam_head_rgb": { @@ -124237,33 +133389,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -124272,42 +133401,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -124316,36 +133435,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -124385,17 +133494,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -124412,10 +133521,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": 
"float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -124432,70 +133541,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -124517,11 +133686,11 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_cut_banana": { - "path": "Cobot_Magic_cut_banana", - "dataset_name": "cut_banana", + "Cobot_Magic_plate_storage_apple": { + "path": "Cobot_Magic_plate_storage_apple", + "dataset_name": "plate_storage_apple", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -124530,10 +133699,9 @@ "atomic_actions": [ "grasp", "pick", - "cut", "place" ], - "tasks": "Cut the banana with your left hand.", + "tasks": "Grasp the fruit that can keep the doctor away", "objects": [ { "object_name": "table", @@ -124544,193 +133712,35 @@ "level5": null }, { - "object_name": "banana", + "object_name": "apple", "level1": "fruit", - "level2": "banana", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plastic_vegetable_board", - "level1": "tool", - "level2": "plastic_vegetable_board", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "knife_holder", - "level1": "furniture", - "level2": "knife_holder", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "knife", - "level1": "tool", - "level2": "knife", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "fake_banana", - "level1": "toy", - "level2": "fake_banana", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-310367", - "dataset_size": "4.2GB", - "statistics": { - "total_episodes": 583, - "total_frames": 310367, - "total_tasks": 2, - "total_videos": 1749, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "3637d17f-4a5a-4801-902f-3e5aee09f48b", - "language": [ - "en", - "zh" 
- ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Cut the banana with your left hand.", - "Cut the banana with your right hand.", - "Hold the blade with your left hand.", - "Discard the waste.", - "Secure the blade with your left hand.", - "Place the knife on the storage rack with your right hand.", - "Grab the knife handle with your right hand.", - "end", - "Lift the knife with your right hand.", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng 
Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_cut_banana_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_cut_banana_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── 
data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "AIRBOT_MMK2_item_storage": { - "path": "AIRBOT_MMK2_item_storage", - "dataset_name": "item_storage", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the umbrella with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "able", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "barrel", - "level1": "container", - "level2": "barrel", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "umbrella", - "level1": "daily_necessities", - "level2": "umbrella", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bottle", - "level1": "container", - "level2": "bottle", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lid", - "level1": "daily_necessities", - "level2": "lid", + "level2": "apple", "level3": null, "level4": null, "level5": null }, { - "object_name": "bowl", + "object_name": "plate", "level1": "container", - "level2": 
"bowl", + "level2": "plate", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-18773", - "dataset_size": "717.3MB", + "frame_range": "0-30676", + "dataset_size": "605.5MB", "statistics": { - "total_episodes": 72, - "total_frames": 18773, - "total_tasks": 2, - "total_videos": 288, + "total_episodes": 103, + "total_frames": 30676, + "total_tasks": 1, + "total_videos": 309, "total_chunks": 1, "chunks_size": 1000, - "fps": 30 + "fps": 50 }, - "dataset_uuid": "cfed792e-cf1c-4d1f-b6fc-a022e9be4d5e", + "dataset_uuid": "4c90a2cc-aea8-4468-a7c2-b0afeae50a98", "language": [ "en", "zh" @@ -124739,16 +133749,8 @@ "robotics" ], "sub_tasks": [ - "Grasp the umbrella with the left gripper", - "End", - "Grasp the bowl with the right gripper", - "Grasp the bowl with the left gripper", - "Place the cup into the white basket with the right gripper", - "Static", - "Place the umbrella into the white basket with the left gripper", - "Place the bowl into the yellow basket with the left gripper", - "Place the bowl into the yellow basket with the right gripper", - "Grasp the cup with the right gripper", + "Grasp the fruit that can keep the doctor away", + "Place the picked object into the yellow plate", "null" ], "annotations": { @@ -124786,10 +133788,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, 
Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": 
"AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "Cobot_Magic_plate_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Cobot_Magic_plate_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Galaxea_R1_Lite_mix_color_large_test_tube": { + "Airbot_MMK2_remove_lid": { "task_categories": [ "robotics" ], @@ -124819,11 +133821,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_color_large_test_tube", + "dataset_name": "Airbot_MMK2_remove_lid", 
"dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "scene_level1", + "level2": "scene_level2", "level3": null, "level4": null, "level5": null @@ -124831,49 +133833,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_pigment", - "level1": "materials", - "level2": "yellow_pigment", + "object_name": "lid", + "level1": "storage_utensils", + "level2": "lid", "level3": null, "level4": null, "level5": null }, { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", + "object_name": "table", + "level1": "home_storage", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "large_test_tubes", + "object_name": "box", "level1": "laboratory_supplies", - "level2": "large_test_tubes", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", + "level2": "box", "level3": null, "level4": null, "level5": null @@ -124881,80 +133859,54 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up a test tube with pigment on left test tube rack and a test tube with pigment on right test tube rack by grippers and pour them into the beaker." + "pick up the lid of the foam box." 
], "sub_tasks": [ { - "subtask": "Place the test tube into the paper cup with the left gripper", + "subtask": "Place the lid of the foam box on the table with your left gripper", "subtask_index": 0 }, { - "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", + "subtask": "Grasp the lid of the foam box with your left gripper", "subtask_index": 2 }, { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask": "Grasp the lid of the foam box with your right gripper", "subtask_index": 3 }, { - "subtask": "Grasp the blue reagent with the left gripper", + "subtask": "Place the lid of the foam box on the table with your right gripper", "subtask_index": 4 }, - { - "subtask": "End", - "subtask_index": 5 - }, - { - "subtask": "Place the test tube into the paper cup with the right gripper", - "subtask_index": 6 - }, - { - "subtask": "Grasp the yellow reagent with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", - "subtask_index": 8 - }, - { - "subtask": "Grasp the red reagent with the left gripper", - "subtask_index": 9 - }, - { - "subtask": "Grasp the red reagent with the right gripper", - "subtask_index": 10 - }, { "subtask": "null", - "subtask_index": 11 + "subtask_index": 5 } ], "atomic_actions": [ - "grasp", - "pick", - "place", - "pour" + "lift", + "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + 
"cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -124971,30 +133923,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 121, - "total_frames": 131656, + "total_episodes": 47, + "total_frames": 11589, "fps": 30, - "total_tasks": 12, - "total_videos": 484, + "total_tasks": 6, + "total_videos": 188, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "4.70 GB" + "dataset_size": "403.42 MB" }, - "frame_num": 131656, - "dataset_size": "4.70 GB", - "data_structure": "Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- 
episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (109 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 11589, + "dataset_size": "403.42 MB", + "data_structure": "Airbot_MMK2_remove_lid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:120" + "train": "0:46" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -125003,8 +133955,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -125013,11 +133965,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -125026,8 +133978,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -125036,11 +133988,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -125049,8 +134001,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -125059,11 +134011,11 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -125072,8 +134024,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": 
"av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -125085,7 +134037,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -125100,14 +134052,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -125122,8 +134096,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -125274,66 +134270,6 @@ 2 ], "dtype": "int32" - }, - 
"gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -125355,9 +134291,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_swap_bbs_cake_plate": { + "Airbot_MMK2_push_plunger": { "task_categories": [ "robotics" ], @@ -125387,11 +134323,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_swap_bbs_cake_plate", + "dataset_name": "Airbot_MMK2_push_plunger", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "medical_healthcare", + "level2": "hospital", "level3": null, "level4": null, "level5": null @@ -125399,25 +134335,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "bb_pellets", - "level1": "toys", - "level2": 
"bb_pellets", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "cake", - "level1": "bread", - "level2": "cake", + "object_name": "table", + "level1": "furniture", + "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "plate", - "level1": "kitchen_supplies", - "level2": "plate", + "object_name": "syringe", + "level1": "medical_supplies", + "level2": "syringe", "level3": null, "level4": null, "level5": null @@ -125425,35 +134353,35 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take the bbs out of the plate with left hand and put the cake in with right hand." + "pick up the syringe with your left hand, then push it back with your right hand, and finally lower it with your left hand." ], "sub_tasks": [ { - "subtask": "Grasp the cake on the table with the right gripper", + "subtask": "Lift the syringe with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the bullet on the table with the left gripper", + "subtask": "Push the piston with the right gripper", "subtask_index": 1 }, { - "subtask": "Grasp the bullet into the plate with the left gripper", + "subtask": "End", "subtask_index": 2 }, { - "subtask": "Abnormal", + "subtask": "Grasp the syringe with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the cake into the plate with the right gripper", + "subtask": "Grasp the piston with the right gripper", "subtask_index": 4 }, { - "subtask": "Static", + "subtask": "Place the syringe on the table with the left gripper", "subtask_index": 5 }, { - "subtask": "End", + "subtask": "Abnormal", "subtask_index": 6 }, { @@ -125464,7 +134392,8 @@ "atomic_actions": [ "grasp", "pick", - "place" + "place", + "push" ], "robot_name": [ "Airbot_MMK2" @@ -125498,23 +134427,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 49, - "total_frames": 7877, + "total_episodes": 147, 
+ "total_frames": 47091, "fps": 30, "total_tasks": 8, - "total_videos": 196, + "total_videos": 588, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "384.19 MB" + "dataset_size": "1.65 GB" }, - "frame_num": 7877, - "dataset_size": "384.19 MB", - "data_structure": "Airbot_MMK2_swap_bbs_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 47091, + "dataset_size": "1.65 GB", + "data_structure": "Airbot_MMK2_push_plunger_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(135 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:48" + "train": "0:146" }, "features": { "observation.images.cam_head_rgb": { @@ -125868,94 +134797,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "R1_Lite_opening_and_closing_aalcony_sliding_doors": { - "path": "R1_Lite_opening_and_closing_aalcony_sliding_doors", - "dataset_name": "opening_and_closing_aalcony_sliding_doors", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "push", - "pull" - ], - "tasks": "Close the slid door", - "objects": [ - { - "object_name": "sliding_door", - "level1": "furniture", - "level2": "sliding_door", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-133637", - "dataset_size": "6.9GB", - "statistics": { - "total_episodes": 101, - "total_frames": 133637, - "total_tasks": 1, - "total_videos": 303, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "4d71497e-4f98-487e-8f46-e2a076016680", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Close the slid door", - "Open the slid door", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "R1_Lite_opening_and_closing_aalcony_sliding_doors_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_opening_and_closing_aalcony_sliding_doors_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── 
observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Agilex_Cobot_Magic_storage_orange_basket_right": { + "Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail": { "task_categories": [ "robotics" ], @@ -125985,7 +134827,7 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_storage_orange_basket_right", + "dataset_name": "Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", @@ -125998,24 +134840,64 @@ "objects": [ { "object_name": "table", - "level1": "home_storage", + "level1": "furniture", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "brown_basket", + "object_name": "basket", "level1": "home_storage", - "level2": "brown_basket", + "level2": "basket", "level3": null, "level4": null, "level5": null }, { - "object_name": "orange", - "level1": "food", - "level2": "orange", + "object_name": "pen_holder", + "level1": "stationery", + "level2": "pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "storage_box", + "level1": "home_storage", + "level2": "storage_box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cola", + "level1": "beverages", + "level2": "cola", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "beverages", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pen", + "level1": "stationery", + "level2": "pen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toilet_paper", + "level1": "daily_necessities", + "level2": "toilet_paper", "level3": null, "level4": null, "level5": null @@ -126023,34 +134905,94 @@ ], "task_operation_type": "Due to some reasons, this 
dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "the right gripper grabs oranges and puts them into the basket." + "take out the empty pen holder by hand, and then put the milk and cola into the basket on the left. place the pen holders in the middle one by one, and then put the papers one by one into the box on the right." ], "sub_tasks": [ { - "subtask": "Place the orange in the basket with right gripper", + "subtask": "Place the red marker pen in pen holder with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the orange marker pen in pen holder with the right gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Place the waste paper in the box with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the orange with right gripper", + "subtask": "End", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "Grasp the black marker pen with the left gripper", "subtask_index": 4 + }, + { + "subtask": "Place the red marker pen in pen holder with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the red marker pen with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the cola can in the basket with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Grasp the black marker pen with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the orange marker pen with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the black marker pen in pen holder with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the orange marker pen in pen holder with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Grasp the cola can with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the orange marker pen with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the milk in the basket with the left gripper", + 
"subtask_index": 14 + }, + { + "subtask": "Grasp the milk with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the waste paper with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the black marker pen in pen holder with the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the red marker pen with the left gripper", + "subtask_index": 18 + }, + { + "subtask": "null", + "subtask_index": 19 } ], "atomic_actions": [ "grasp", - "lift", - "lower" + "pick", + "place" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -126082,23 +135024,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 98, - "total_frames": 22531, + "total_episodes": 117, + "total_frames": 274558, "fps": 30, - "total_tasks": 5, - "total_videos": 294, + "total_tasks": 20, + "total_videos": 351, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "233.17 MB" + "dataset_size": "4.17 GB" }, - "frame_num": 22531, - "dataset_size": "233.17 MB", - "data_structure": "Agilex_Cobot_Magic_storage_orange_basket_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 274558, + "dataset_size": "4.17 GB", + "data_structure": "Agilex_Split_Aloha_organize_desk_fail_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(105 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:97" + "train": "0:116" }, "features": { "observation.images.cam_head_rgb": { @@ -126469,110 +135411,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Cobot_Magic_plate_storage_apple": { - "path": "Cobot_Magic_plate_storage_apple", - "dataset_name": "plate_storage_apple", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the fruit that can keep the doctor away", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "apple", - "level1": "fruit", - "level2": "apple", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "plate", - "level1": "container", - "level2": "plate", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-30676", - "dataset_size": "605.5MB", - "statistics": { - "total_episodes": 103, - "total_frames": 30676, - "total_tasks": 1, - "total_videos": 309, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 50 - }, - "dataset_uuid": "4c90a2cc-aea8-4468-a7c2-b0afeae50a98", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the fruit that can keep the doctor away", - "Place the picked object into the yellow plate", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": 
"auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = 
{https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "Cobot_Magic_plate_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Cobot_Magic_plate_storage_apple_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, - "Airbot_MMK2_remove_lid": { + "Airbot_MMK2_storage_gold_bar_model_shark_doll": { "task_categories": [ "robotics" ], @@ -126602,11 +135441,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_remove_lid", + "dataset_name": "Airbot_MMK2_storage_gold_bar_model_shark_doll", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", + "level1": "household", + "level2": "living_room", "level3": null, "level4": null, "level5": null @@ -126614,25 +135453,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "lid", - "level1": "storage_utensils", - "level2": "lid", + "object_name": "paper_boxes", + "level1": "packaging", + "level2": "paper_boxes", "level3": null, "level4": null, "level5": null }, { - "object_name": "table", - "level1": "home_storage", - "level2": "table", + "object_name": "shark_doll", + "level1": "toys", + "level2": "shark_doll", "level3": null, "level4": null, "level5": null }, { - "object_name": "box", - "level1": "laboratory_supplies", - "level2": "box", + "object_name": "gold_bar", + "level1": "metal_products", + "level2": "gold_bar", "level3": null, "level4": null, "level5": null @@ -126640,27 +135479,27 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the lid of the foam box." 
+ "put the shark doll and the gold bar into the paper box respectively with left and right hands." ], "sub_tasks": [ { - "subtask": "Place the lid of the foam box on the table with your left gripper", + "subtask": "End", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the whale on the paper box with the left gripper", "subtask_index": 1 }, { - "subtask": "Grasp the lid of the foam box with your left gripper", + "subtask": "Place the gold bar on the paper box with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the lid of the foam box with your right gripper", + "subtask": "Grasp the whale with the left gripper", "subtask_index": 3 }, { - "subtask": "Place the lid of the foam box on the table with your right gripper", + "subtask": "Grasp the gold bar with the right gripper", "subtask_index": 4 }, { @@ -126669,7 +135508,8 @@ } ], "atomic_actions": [ - "lift", + "grasp", + "pick", "place" ], "robot_name": [ @@ -126704,23 +135544,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 47, - "total_frames": 11589, + "total_episodes": 44, + "total_frames": 5620, "fps": 30, "total_tasks": 6, - "total_videos": 188, + "total_videos": 176, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "403.42 MB" + "dataset_size": "240.37 MB" }, - "frame_num": 11589, - "dataset_size": "403.42 MB", - "data_structure": "Airbot_MMK2_remove_lid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- 
episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 5620, + "dataset_size": "240.37 MB", + "data_structure": "Airbot_MMK2_storage_gold_bar_model_shark_doll_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:46" + "train": "0:43" }, "features": { "observation.images.cam_head_rgb": { @@ -127074,7 +135914,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_push_plunger": { + "Galaxea_R1_Lite_mix_red_yellow_right": { "task_categories": [ "robotics" ], @@ -127104,11 +135944,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_push_plunger", + "dataset_name": "Galaxea_R1_Lite_mix_red_yellow_right", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "medical_healthcare", - "level2": "hospital", + "level1": "office_workspace", + "level2": "office", "level3": null, "level4": null, "level5": null @@ -127116,17 +135956,49 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "table", - "level1": "furniture", - "level2": "table", + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", "level3": null, "level4": null, "level5": null }, { - "object_name": "syringe", - "level1": "medical_supplies", - "level2": "syringe", + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": 
null, + "level5": null + }, + { + "object_name": "test_tubes", + "level1": "laboratory_supplies", + "level2": "test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", "level3": null, "level4": null, "level5": null @@ -127134,64 +136006,88 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the syringe with your left hand, then push it back with your right hand, and finally lower it with your left hand." + "pick up the test tube with yellow pigment and the test tube with red pigment by grippers and pour them into the beaker." ], "sub_tasks": [ { - "subtask": "Lift the syringe with the left gripper", + "subtask": "Place the test tube into the bowl with right gripper", "subtask_index": 0 }, { - "subtask": "Push the piston with the right gripper", + "subtask": "Pick up the test tube containing the yellow reagent with right gripper", "subtask_index": 1 }, { - "subtask": "End", + "subtask": "Place the test tube into the pink bowl with the right gripper", "subtask_index": 2 }, { - "subtask": "Grasp the syringe with the left gripper", + "subtask": "Grasp the yellow reagent with the right gripper", "subtask_index": 3 }, { - "subtask": "Grasp the piston with the right gripper", + "subtask": "Grasp the red reagent with the right gripper", "subtask_index": 4 }, { - "subtask": "Place the syringe on the table with the left gripper", + "subtask": "Pick up the test tube containing the red reagent with right gripper", "subtask_index": 5 }, { - "subtask": "Abnormal", + "subtask": "Place the test tube into the bowl with the right gripper", "subtask_index": 6 }, { - "subtask": "null", + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", "subtask_index": 7 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with right 
gripper", + "subtask_index": 8 + }, + { + "subtask": "End", + "subtask_index": 9 + }, + { + "subtask": "Pour the yellow reagent into the graduated cylinder with right gripper", + "subtask_index": 10 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "end", + "subtask_index": 12 + }, + { + "subtask": "null", + "subtask_index": 13 } ], "atomic_actions": [ "grasp", "pick", "place", - "push" + "pour" ], "robot_name": [ - "Airbot_MMK2" + "Galaxea_R1_Lite" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_rgb", + "cam_head_left_rgb", + "cam_head_right_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { - "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -127208,30 +136104,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 147, - "total_frames": 47091, + "total_episodes": 99, + 
"total_frames": 61864, "fps": 30, - "total_tasks": 8, - "total_videos": 588, + "total_tasks": 14, + "total_videos": 396, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, + "state_dim": 14, + "action_dim": 14, "camera_views": 4, - "dataset_size": "1.65 GB" + "dataset_size": "1.22 GB" }, - "frame_num": 47091, - "dataset_size": "1.65 GB", - "data_structure": "Airbot_MMK2_push_plunger_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(135 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 61864, + "dataset_size": "1.22 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:146" + "train": "0:98" }, "features": { - "observation.images.cam_head_rgb": { + "observation.images.cam_head_left_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -127240,8 +136136,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -127250,11 +136146,11 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_head_right_rgb": { "dtype": "video", "shape": [ - 480, - 640, + 720, + 1280, 3 ], "names": [ @@ -127263,8 +136159,8 @@ "channels" ], "info": { - "video.height": 480, - "video.width": 640, + "video.height": 720, + "video.width": 1280, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -127273,10 +136169,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -127286,7 +136182,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -127296,10 +136192,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 480, + 360, 640, 3 ], @@ -127309,7 +136205,7 @@ "channels" ], "info": { - "video.height": 480, + "video.height": 360, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -127322,51 +136218,29 @@ 
"observation.state": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", - "left_arm_joint_2_rad", - "left_arm_joint_3_rad", - "left_arm_joint_4_rad", - "left_arm_joint_5_rad", - "left_arm_joint_6_rad", - "right_arm_joint_1_rad", - "right_arm_joint_2_rad", - "right_arm_joint_3_rad", - "right_arm_joint_4_rad", - "right_arm_joint_5_rad", - "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 14 ], "names": [ "left_arm_joint_1_rad", @@ -127381,30 +136255,8 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - 
"right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "left_gripper_open", + "right_gripper_open" ] }, "timestamp": { @@ -127555,6 +136407,66 @@ 2 ], "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, "authors": { @@ -127576,9 +136488,114 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail": { + "Split_aloha_basket_storage_bread": { + "path": "Split_aloha_basket_storage_bread", + "dataset_name": "basket_storage_bread", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + 
"grasp", + "place", + "pick" + ], + "tasks": "abnormal", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "basket", + "level1": "container", + "level2": "basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-211779", + "dataset_size": "4.3GB", + "statistics": { + "total_episodes": 497, + "total_frames": 211779, + "total_tasks": 4, + "total_videos": 1491, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "c86fb8b5-8845-4798-9c83-f3a7aec176f7", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "abnormal", + "Pick up the bread", + "Move the basket to the center of view", + "place the bread into the basket", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n 
author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "Split_aloha_basket_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "Split_aloha_basket_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Agilex_Cobot_Magic_move_object_green_tablecloth": { "task_categories": [ "robotics" ], @@ -127608,11 +136625,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail", + "dataset_name": "Agilex_Cobot_Magic_move_object_green_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial & 
convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -127621,64 +136638,240 @@ "objects": [ { "object_name": "table", - "level1": "furniture", + "level1": "home_storage", "level2": "table", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", - "level1": "home_storage", - "level2": "basket", + "object_name": "green_table_cloths", + "level1": "laboratory_supplies", + "level2": "green_table_cloths", "level3": null, "level4": null, "level5": null }, { - "object_name": "pen_holder", + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + 
"level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": "Fruit cake", + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beef_cheeseburger", + "level1": "food", + "level2": "beef_cheeseburger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pan", + "level1": "kitchen_supplies", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teapot", + "level1": "kitchen_supplies", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teacup", + "level1": "kitchen_supplies", + "level2": "small_teacup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_ball", + "level1": "trash", + "level2": "paper_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_square_towel", + "level1": "daily_necessities", + "level2": "brown_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_cylindrical_pen_holder", "level1": "stationery", - "level2": "pen_holder", + "level2": "black_cylindrical_pen_holder", "level3": null, "level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "home_storage", - "level2": "storage_box", + "object_name": "pink_long_towel", + "level1": "daily_necessities", + "level2": "pink_long_towel", "level3": null, "level4": null, "level5": null }, { - "object_name": "cola", - "level1": "beverages", - "level2": "cola", + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", "level3": null, "level4": null, "level5": 
null }, { - "object_name": "milk", - "level1": "beverages", - "level2": "milk", + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", "level3": null, "level4": null, "level5": null }, { - "object_name": "pen", + "object_name": "duck", + "level1": "toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", "level1": "stationery", - "level2": "pen", + "level2": "compass", "level3": null, "level4": null, "level5": null }, { - "object_name": "toilet_paper", + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_long_towel", "level1": "daily_necessities", - "level2": "toilet_paper", + "level2": "blue_long_towel", "level3": null, "level4": null, "level5": null @@ -127686,94 +136879,746 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "take out the empty pen holder by hand, and then put the milk and cola into the basket on the left. place the pen holders in the middle one by one, and then put the papers one by one into the box on the right." + "the gripper move the object." 
], "sub_tasks": [ { - "subtask": "Place the red marker pen in pen holder with the right gripper", + "subtask": "Place the XX on the table with the left gripper", "subtask_index": 0 }, { - "subtask": "Place the orange marker pen in pen holder with the right gripper", + "subtask": "Grasp the blue blackboard earser with the right gripper\n", "subtask_index": 1 }, { - "subtask": "Place the waste paper in the box with the right gripper", + "subtask": "Grasp the square chewing gun with the right gripper ", "subtask_index": 2 }, { - "subtask": "End", + "subtask": "Grasp the pen container with the right gripper\n", "subtask_index": 3 }, { - "subtask": "Grasp the black marker pen with the left gripper", + "subtask": "Grasp the blue bowl with the left gripper\n", "subtask_index": 4 }, { - "subtask": "Place the red marker pen in pen holder with the left gripper", + "subtask": "Place the hard blackbaord cleanser on the table with the left gripper\n", "subtask_index": 5 }, { - "subtask": "Grasp the red marker pen with the right gripper", + "subtask": "Place the blue towel on the table with the right gripper\n", "subtask_index": 6 }, { - "subtask": "Place the cola can in the basket with the left gripper", + "subtask": "Grasp the orange with the right gripper\n", "subtask_index": 7 }, { - "subtask": "Grasp the black marker pen with the right gripper", + "subtask": "Grasp the white blackboard earser with the left gripper\n", "subtask_index": 8 }, { - "subtask": "Grasp the orange marker pen with the right gripper", + "subtask": "Place the mangosteen on the table with the left gripper\n", "subtask_index": 9 }, { - "subtask": "Place the black marker pen in pen holder with the right gripper", + "subtask": "Grasp the pen container with the right gripper", "subtask_index": 10 }, { - "subtask": "Place the orange marker pen in pen holder with the left gripper", + "subtask": "Place the pen container on the table with the right gripper", "subtask_index": 11 }, { - "subtask": "Grasp the cola 
can with the left gripper", + "subtask": "Grasp the white blackboard earser with left gripper", "subtask_index": 12 }, { - "subtask": "Grasp the orange marker pen with the left gripper", + "subtask": "Place the blue bowl on the table with the right gripper", "subtask_index": 13 }, { - "subtask": "Place the milk in the basket with the left gripper", + "subtask": "Place the eggplant on the table with the right gripper", "subtask_index": 14 }, { - "subtask": "Grasp the milk with the left gripper", + "subtask": "Place the blue blackboard earser on the table with the right gripper\n", "subtask_index": 15 }, { - "subtask": "Grasp the waste paper with the right gripper", + "subtask": "Place the blue blackboard earser on the table with the right gripper\n", "subtask_index": 16 }, { - "subtask": "Place the black marker pen in pen holder with the left gripper", + "subtask": "Place the orange on the table with the right gripper\n", "subtask_index": 17 }, { - "subtask": "Grasp the red marker pen with the left gripper", + "subtask": "Place the blue bowl on the table with the right gripper\n", "subtask_index": 18 }, { - "subtask": "null", + "subtask": "Place the brown towel on the table with the left gripper\n", "subtask_index": 19 + }, + { + "subtask": "Grasp the cyan cup with the right gripper ", + "subtask_index": 20 + }, + { + "subtask": "Place the compasses on the table with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the green lemon with the right gripper\n", + "subtask_index": 23 + }, + { + "subtask": "Grasp the cyan cup with the left gripper\n", + "subtask_index": 24 + }, + { + "subtask": "Place the wallfe on the table with the left gripper\n", + "subtask_index": 25 + }, + { + "subtask": "Place the blue bowl on the table with the left gripper\n", + "subtask_index": 26 + }, + { + "subtask": "Grasp the fruit candy with the left gripper", + "subtask_index": 
27 + }, + { + "subtask": "Grasp the orange with the right gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the green lemon on the table with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Grasp the cyan cup with the right gripper\n", + "subtask_index": 30 + }, + { + "subtask": "Grasp the wallfe with the right gripper\n", + "subtask_index": 31 + }, + { + "subtask": "Grasp the green lemon with the left gripper\n", + "subtask_index": 32 + }, + { + "subtask": "Place the white blackboard earser on the table with the right gripper\n", + "subtask_index": 33 + }, + { + "subtask": "\nGrasp the green lemon with the right gripper", + "subtask_index": 34 + }, + { + "subtask": "Grasp the square chewing gun with the left gripper\n", + "subtask_index": 35 + }, + { + "subtask": "Grasp the square chewing gum with the left gripper", + "subtask_index": 36 + }, + { + "subtask": "Grasp the with cyan cup the left gripper\n", + "subtask_index": 37 + }, + { + "subtask": "Place the blue bowl on the table with the left gripper", + "subtask_index": 38 + }, + { + "subtask": "Grasp the tea[ot with the left gripper\n", + "subtask_index": 39 + }, + { + "subtask": "Grasp the cyan cup with the right gripper ", + "subtask_index": 40 + }, + { + "subtask": "Place the wallfe on the table with the right gripper\n", + "subtask_index": 41 + }, + { + "subtask": "Place the pen container on the table with the right gripper\n", + "subtask_index": 42 + }, + { + "subtask": "Place the eggplant on the table with the left gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper\n", + "subtask_index": 44 + }, + { + "subtask": "Place the ornage on the table with the right gripper\n", + "subtask_index": 45 + }, + { + "subtask": "Grasp the pen container with the left gripper\n", + "subtask_index": 46 + }, + { + "subtask": "Place the fruit candy on the table with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Grasp the 
hard facial cleanser with the left gripper", + "subtask_index": 48 + }, + { + "subtask": "Grasp the chocolate with the right gripper", + "subtask_index": 49 + }, + { + "subtask": "Grasp the blue blackboard earser with the right gripper ", + "subtask_index": 50 + }, + { + "subtask": "Grasp the mangosteen with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the square chewing gun on the table with the right gripper\n", + "subtask_index": 52 + }, + { + "subtask": "Grasp the brown towel with the left gripper\n", + "subtask_index": 53 + }, + { + "subtask": "Place the brown towel on the table with the left gripper\n", + "subtask_index": 54 + }, + { + "subtask": "Grasp the brown towel with the right gripper mangosteen\n", + "subtask_index": 55 + }, + { + "subtask": "Grasp the blue bowl with the right gripper\n", + "subtask_index": 56 + }, + { + "subtask": "Place the teapot on the table with the left gripper", + "subtask_index": 57 + }, + { + "subtask": "Grasp the blue cup with the left gripper\n", + "subtask_index": 58 + }, + { + "subtask": "Grasp the teacup with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Grasp the square chewing gun with the right gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the teapot with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Place the pink towel on the table with the left gripper", + "subtask_index": 62 + }, + { + "subtask": "Grasp the blue bowl with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the waffle on the table with the right gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the pen container on the table with the right gripper", + "subtask_index": 65 + }, + { + "subtask": "Place the mangosteen on the table with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Grasp the pen container with the left gripper\n", + "subtask_index": 67 + }, + { + "subtask": "Grasp the eggplant with the right gripper", + 
"subtask_index": 68 + }, + { + "subtask": "Grasp the white blackboard earser with the right gripper ", + "subtask_index": 69 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the eggplant on the table with the left gripper\n", + "subtask_index": 71 + }, + { + "subtask": "Grasp the eggplant with the left gripper", + "subtask_index": 72 + }, + { + "subtask": "Place the hard facial cleanser on the table with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper ", + "subtask_index": 74 + }, + { + "subtask": "Grasp the mangosteen with the left gripper\n", + "subtask_index": 75 + }, + { + "subtask": "Place the mangosteen on the table with the right gripper\n", + "subtask_index": 76 + }, + { + "subtask": "End", + "subtask_index": 77 + }, + { + "subtask": "Grasp the wallfe with the right gripper ", + "subtask_index": 78 + }, + { + "subtask": "Grasp the white blackboard erasure with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Grasp the blue blackboard erasure with the left gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the white blackboard earser with the right gripper\n", + "subtask_index": 81 + }, + { + "subtask": "Place the orange on the table with the left gripper\n", + "subtask_index": 82 + }, + { + "subtask": "Place the fruit candy on the table with the right gripper", + "subtask_index": 83 + }, + { + "subtask": "Grasp the brown towel with the right gripper\n", + "subtask_index": 84 + }, + { + "subtask": "Grasp the orange with the left gripper\n", + "subtask_index": 85 + }, + { + "subtask": "Place the compass on the table with the right gripper\n", + "subtask_index": 86 + }, + { + "subtask": "Grasp the tea cup with the left gripper\n", + "subtask_index": 87 + }, + { + "subtask": "Place the hard facial cleanser on the table with the left gripper\n", + "subtask_index": 88 + }, + { + "subtask": "Place 
the brown towel on the table with the right gripper\n", + "subtask_index": 89 + }, + { + "subtask": "Grasp the eggplant with the left gripper\n", + "subtask_index": 90 + }, + { + "subtask": "Place the pen container on the table with the left gripper", + "subtask_index": 91 + }, + { + "subtask": "Grasp the white blackboard earser with the right gripper\n", + "subtask_index": 92 + }, + { + "subtask": "Grasp the square chewing gun with the right gripper ", + "subtask_index": 93 + }, + { + "subtask": "Place the cyan cup on the table with the left gripper\n", + "subtask_index": 94 + }, + { + "subtask": "Grasp the waffle with the right gripper", + "subtask_index": 95 + }, + { + "subtask": "Grasp the blue bowl with the right gripper", + "subtask_index": 96 + }, + { + "subtask": "Grasp the square chewing gun with the right gripper ", + "subtask_index": 97 + }, + { + "subtask": "Place the waffle on the table with the right gripper\n", + "subtask_index": 98 + }, + { + "subtask": "Grasp the blue bowl with the right gripper ", + "subtask_index": 99 + }, + { + "subtask": "Abnormal", + "subtask_index": 100 + }, + { + "subtask": "Grasp the white blackboard earser with the right gripper mangosteen\n", + "subtask_index": 101 + }, + { + "subtask": "Grasp the blue towel with the right gripper \n", + "subtask_index": 102 + }, + { + "subtask": "Grasp the brown towel with the right gripper\n", + "subtask_index": 103 + }, + { + "subtask": "Place the pen container on the table with the right gripper\n", + "subtask_index": 104 + }, + { + "subtask": "Place the square chewing gum on the table with the right gripper", + "subtask_index": 105 + }, + { + "subtask": "Place the green lemon on the table with the right gripper\n", + "subtask_index": 106 + }, + { + "subtask": "Place the chocolate on the table with the right gripper\n", + "subtask_index": 107 + }, + { + "subtask": "Grasp the square facial square with the right gripper ", + "subtask_index": 108 + }, + { + "subtask": "Grasp the teapot 
with the left gripper\n", + "subtask_index": 109 + }, + { + "subtask": "Place the pen container on the table with the left gripper\n", + "subtask_index": 110 + }, + { + "subtask": "Place the hard facial earser on the table with the right gripper\n", + "subtask_index": 111 + }, + { + "subtask": "Place the teacup on the table with the left gripper", + "subtask_index": 112 + }, + { + "subtask": "Place the sqaure chewing gun on the table with the right gripper\n", + "subtask_index": 113 + }, + { + "subtask": "Place the blue cup on the table with the left gripper\n", + "subtask_index": 114 + }, + { + "subtask": "Place the blue blackboard erasure on the table with the left gripper", + "subtask_index": 115 + }, + { + "subtask": "Grasp the wallfe with the right gripper ", + "subtask_index": 116 + }, + { + "subtask": "Grasp the cyan cup with the right gripper", + "subtask_index": 117 + }, + { + "subtask": "Grasp the pink towel with the left gripper", + "subtask_index": 118 + }, + { + "subtask": "Grasp the mangosteen with the right gripper", + "subtask_index": 119 + }, + { + "subtask": "Place the cyan cup on the table with the left gripper\n", + "subtask_index": 120 + }, + { + "subtask": "Place the wallfe on the table with the right gripper\n", + "subtask_index": 121 + }, + { + "subtask": "Grasp the square chewing gun with the left gripper\n", + "subtask_index": 122 + }, + { + "subtask": "Grasp the blue bowel with the right gripper ", + "subtask_index": 123 + }, + { + "subtask": "Grasp the orange with the left gripper", + "subtask_index": 124 + }, + { + "subtask": "Place the sqaure chewing gun on the table with the right gripper\n", + "subtask_index": 125 + }, + { + "subtask": "Grasp the sqaure chewing gun with the right gripper\n", + "subtask_index": 126 + }, + { + "subtask": "Place the orange on the table with the right gripper", + "subtask_index": 127 + }, + { + "subtask": "Place the white blackboard earser on the table with the left gripper\n", + "subtask_index": 128 + 
}, + { + "subtask": "Place the teacup on the table with the left gripper\n", + "subtask_index": 129 + }, + { + "subtask": "Grasp the pen container with the right gripper", + "subtask_index": 130 + }, + { + "subtask": "Grasp the blue blackboard earser with the left gripper\n", + "subtask_index": 131 + }, + { + "subtask": "Place the teapot on the table with the right gripper", + "subtask_index": 132 + }, + { + "subtask": "Grasp the wallfe with the right gripper ", + "subtask_index": 133 + }, + { + "subtask": "Place the mangosteen on the table with the left gripper", + "subtask_index": 134 + }, + { + "subtask": "Place the square chewing gun on the table with the left gripper\n", + "subtask_index": 135 + }, + { + "subtask": "Grasp the fruit candy with the right gripper", + "subtask_index": 136 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 137 + }, + { + "subtask": "Grasp the compass with the right gripper\n", + "subtask_index": 138 + }, + { + "subtask": "Place the teapot on the table with the left gripper\n", + "subtask_index": 139 + }, + { + "subtask": "Grasp the cyan cup with the left gripper\n", + "subtask_index": 140 + }, + { + "subtask": "Place the teacup on the table with the right gripper", + "subtask_index": 141 + }, + { + "subtask": "Grasp the eggplant with the right gripper\n", + "subtask_index": 142 + }, + { + "subtask": "Grasp the pen container with the right gripper mangosteen\n", + "subtask_index": 143 + }, + { + "subtask": "Grasp the mangosteen with the right gripper ", + "subtask_index": 144 + }, + { + "subtask": "Place the green lemon on the table with the left gripper\n", + "subtask_index": 145 + }, + { + "subtask": "Place the pen containeron the table with the left gripper", + "subtask_index": 146 + }, + { + "subtask": "Place the white blackboard erasure on the table with the right gripper", + "subtask_index": 147 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper\n", + 
"subtask_index": 148 + }, + { + "subtask": "Grasp the whiite blackboard earser with the right gripper\n", + "subtask_index": 149 + }, + { + "subtask": "Grasp the wallfe with the left gripper\n", + "subtask_index": 150 + }, + { + "subtask": "Grasp the wallfe with the right gripper mangosteen\n", + "subtask_index": 151 + }, + { + "subtask": "Grasp the chocolate with the right gripper ", + "subtask_index": 152 + }, + { + "subtask": "Place the cyan cup on the table with the left gripper\n", + "subtask_index": 153 + }, + { + "subtask": "Grasp the cyan cup with the right gripper\n", + "subtask_index": 154 + }, + { + "subtask": "Place the green lemon on the table with the right gripper\n", + "subtask_index": 155 + }, + { + "subtask": "Place the chocolate on the table with the left gripper", + "subtask_index": 156 + }, + { + "subtask": "Grasp the cyan cup with the right gripper mangosteen\n", + "subtask_index": 157 + }, + { + "subtask": "Grasp the eggplant with the right gripper ", + "subtask_index": 158 + }, + { + "subtask": "Grasp the white blackboard earser with the left gripper\n", + "subtask_index": 159 + }, + { + "subtask": "Place the hard facial cleanser on the table with the right gripper\n", + "subtask_index": 160 + }, + { + "subtask": "Place the blue blackboard earser on the table with the left gripper\n", + "subtask_index": 161 + }, + { + "subtask": "Place the eggplant on the table with the right gripper\n", + "subtask_index": 162 + }, + { + "subtask": "Grasp the chocolate with the right gripper\n", + "subtask_index": 163 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 164 + }, + { + "subtask": "Place the orange on the table with the left gripper", + "subtask_index": 165 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 166 + }, + { + "subtask": "Place the pen container on the table with the left gripper", + "subtask_index": 167 + }, + { + "subtask": "Place the chocolate on the table with the 
right gripper", + "subtask_index": 168 + }, + { + "subtask": "Place the white blackboard earser on the table with the left gripper", + "subtask_index": 169 + }, + { + "subtask": "Grasp the chocolate with the left gripper\n", + "subtask_index": 170 + }, + { + "subtask": "Grasp the pen container with the left gripper", + "subtask_index": 171 + }, + { + "subtask": "Place the sqaure chewing gun on the table with the left gripper\n", + "subtask_index": 172 + }, + { + "subtask": "Place the tea cup on the table with the left gripper\n", + "subtask_index": 173 + }, + { + "subtask": "Place the chocolate on the table with the right gripper\n", + "subtask_index": 174 + }, + { + "subtask": "Grasp the teacup with the left gripper\n", + "subtask_index": 175 + }, + { + "subtask": "Place the square chewing gum on the table with the left gripper", + "subtask_index": 176 + }, + { + "subtask": "Grasp the brown towel with the right gripper ", + "subtask_index": 177 + }, + { + "subtask": "Grasp the teapot with the left gripper", + "subtask_index": 178 + }, + { + "subtask": "Grasp the teacup with the left gripper", + "subtask_index": 179 + }, + { + "subtask": "Place the chocolate on the table with the left gripper\n", + "subtask_index": 180 + }, + { + "subtask": "Place the cyan cup on the table with the right gripper\n", + "subtask_index": 181 + }, + { + "subtask": "null", + "subtask_index": 182 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower" ], "robot_name": [ "Agilex_Cobot_Magic" @@ -127805,23 +137650,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 117, - "total_frames": 274558, + "total_episodes": 197, + "total_frames": 85405, "fps": 30, - "total_tasks": 20, - "total_videos": 351, + "total_tasks": 183, + "total_videos": 591, "total_chunks": 1, "chunks_size": 1000, "state_dim": 26, "action_dim": 26, "camera_views": 3, - "dataset_size": "4.17 GB" + "dataset_size": "5.33 GB" }, - "frame_num": 274558, - "dataset_size": "4.17 GB", - 
"data_structure": "Agilex_Split_Aloha_organize_desk_fail_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (105 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 85405, + "dataset_size": "5.33 GB", + "data_structure": "Agilex_Cobot_Magic_move_object_green_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:116" + "train": "0:196" }, "features": { "observation.images.cam_head_rgb": { @@ -128192,7 +138037,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_gold_bar_model_shark_doll": { + "Airbot_MMK2_storage_onion_sweet_potato": { "task_categories": [ "robotics" ], @@ -128222,11 +138067,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_gold_bar_model_shark_doll", + "dataset_name": "Airbot_MMK2_storage_onion_sweet_potato", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "living_room", + "level2": "Kitchen", "level3": null, "level4": null, "level5": null @@ -128234,25 +138079,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "paper_boxes", - "level1": "packaging", - "level2": "paper_boxes", + "object_name": "potato", + "level1": "vegetables", + "level2": "potato", "level3": null, "level4": null, "level5": null }, { - "object_name": "shark_doll", - "level1": "toys", - "level2": "shark_doll", + "object_name": "onion", + "level1": "vegetables", + "level2": "onion", "level3": null, "level4": null, "level5": null }, { - "object_name": "gold_bar", - "level1": "metal_products", - "level2": "gold_bar", + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -128260,27 +138105,27 @@ ], "task_operation_type": "Due to some reasons, this 
dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the shark doll and the gold bar into the paper box respectively with left and right hands." + "pick up the potato with left hand and put it in the storage box, and pick up the onion with right hand and put it in the storage box." ], "sub_tasks": [ { - "subtask": "End", + "subtask": "Grasp the eggplant with the right gripper", "subtask_index": 0 }, { - "subtask": "Place the whale on the paper box with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Place the gold bar on the paper box with the right gripper", + "subtask": "Place the sweet potato into the left compartment of the storage box with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the whale with the left gripper", + "subtask": "Place the eggplant into the right compartment of the storage box with the right gripper", "subtask_index": 3 }, { - "subtask": "Grasp the gold bar with the right gripper", + "subtask": "Grasp the sweet potato with the left gripper", "subtask_index": 4 }, { @@ -128289,9 +138134,10 @@ } ], "atomic_actions": [ - "grasp", + "pinch", "pick", - "place" + "place", + "grasp" ], "robot_name": [ "Airbot_MMK2" @@ -128325,23 +138171,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 44, - "total_frames": 5620, + "total_episodes": 50, + "total_frames": 6502, "fps": 30, "total_tasks": 6, - "total_videos": 176, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "240.37 MB" + "dataset_size": "173.29 MB" }, - "frame_num": 5620, - "dataset_size": "240.37 MB", - "data_structure": "Airbot_MMK2_storage_gold_bar_model_shark_doll_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- 
scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 6502, + "dataset_size": "173.29 MB", + "data_structure": "Airbot_MMK2_storage_onion_sweet_potato_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:43" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -128695,7 +138541,7 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_mix_red_yellow_right": { + "Airbot_MMK2_storage_potato_left": { "task_categories": [ "robotics" ], @@ -128725,11 +138571,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_mix_red_yellow_right", + "dataset_name": "Airbot_MMK2_storage_potato_left", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "office_workspace", - "level2": "office", + "level1": "household", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -128737,49 +138583,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "blue_pigment", - "level1": "materials", - "level2": "blue_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "yellow_pigment", - "level1": "materials", - "level2": "yellow_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "red_pigment", - "level1": "materials", - "level2": "red_pigment", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tube_rack", - "level1": "holding_utensils", - "level2": "test_tube_rack", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "test_tubes", - "level1": "laboratory_supplies", - "level2": "test_tubes", + "object_name": 
"potato", + "level1": "vegetables", + "level2": "potato", "level3": null, "level4": null, "level5": null }, { - "object_name": "beaker", - "level1": "holding_utensils", - "level2": "beaker", + "object_name": "storage_box", + "level1": "storage_utensils", + "level2": "storage_box", "level3": null, "level4": null, "level5": null @@ -128787,88 +138601,63 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the test tube with yellow pigment and the test tube with red pigment by grippers and pour them into the beaker." + "pick up the potato with left hand and put it in the storage box." ], "sub_tasks": [ { - "subtask": "Place the test tube into the bowl with right gripper", + "subtask": "Grasp the potato with the right gripper", "subtask_index": 0 }, { - "subtask": "Pick up the test tube containing the yellow reagent with right gripper", + "subtask": "Grasp the potato with the left gripper", "subtask_index": 1 }, { - "subtask": "Place the test tube into the pink bowl with the right gripper", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Grasp the yellow reagent with the right gripper", + "subtask": "Place the potato into the left compartment of the storage box with the left gripper", "subtask_index": 3 }, { - "subtask": "Grasp the red reagent with the right gripper", + "subtask": "End", "subtask_index": 4 }, { - "subtask": "Pick up the test tube containing the red reagent with right gripper", + "subtask": "Static", "subtask_index": 5 }, { - "subtask": "Place the test tube into the bowl with the right gripper", + "subtask": "Place the potato into the right compartment of the storage box with the right gripper", "subtask_index": 6 }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", - "subtask_index": 7 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with right gripper", - 
"subtask_index": 8 - }, - { - "subtask": "End", - "subtask_index": 9 - }, - { - "subtask": "Pour the yellow reagent into the graduated cylinder with right gripper", - "subtask_index": 10 - }, - { - "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "end", - "subtask_index": 12 - }, { "subtask": "null", - "subtask_index": 13 + "subtask_index": 7 } ], "atomic_actions": [ "grasp", "pick", - "place", - "pour" + "place" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -128885,30 +138674,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 99, - "total_frames": 61864, + "total_episodes": 69, + 
"total_frames": 8728, "fps": 30, - "total_tasks": 14, - "total_videos": 396, + "total_tasks": 8, + "total_videos": 276, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "1.22 GB" + "dataset_size": "322.26 MB" }, - "frame_num": 61864, - "dataset_size": "1.22 GB", - "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 8728, + "dataset_size": "322.26 MB", + "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_potato_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(57 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:98" + "train": "0:68" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -128917,8 +138706,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -128927,11 +138716,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -128940,8 +138729,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -128950,10 +138739,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -128963,7 +138752,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -128973,10 +138762,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -128986,7 +138775,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -128999,7 +138788,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], 
"names": [ "left_arm_joint_1_rad", @@ -129014,14 +138803,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -129036,8 +138847,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -129188,66 +139021,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -129269,11 +139042,11 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Split_aloha_basket_storage_bread": { - "path": "Split_aloha_basket_storage_bread", - "dataset_name": "basket_storage_bread", + "AgiBot-g1_storage_item_e": { + "path": "AgiBot-g1_storage_item_e", + "dataset_name": "storage_item_e", "robot_type": "", "end_effector_type": [ "two_finger_gripper" @@ -129284,7 +139057,7 @@ "place", "pick" ], - "tasks": "abnormal", + "tasks": "Grab and lift both the mouse and power cord from the accessory packaging area at the same time", "objects": [ { "object_name": "table", @@ -129295,35 +139068,43 @@ "level5": null }, { - "object_name": "long_bread", - "level1": "food", - "level2": "long_bread", + "object_name": "data_cable", + "level1": "tool", + "level2": "data_cable", "level3": null, "level4": null, "level5": null }, { - "object_name": "basket", + "object_name": "box", "level1": "container", - "level2": "basket", + "level2": 
"box", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mouse", + "level1": "tool", + "level2": "mouse", "level3": null, "level4": null, "level5": null } ], - "operation_platform_height": 77.2, - "frame_range": "0-211779", - "dataset_size": "4.3GB", + "operation_platform_height": null, + "frame_range": "0-212067", + "dataset_size": "92.8GB", "statistics": { - "total_episodes": 497, - "total_frames": 211779, - "total_tasks": 4, - "total_videos": 1491, + "total_episodes": 507, + "total_frames": 212067, + "total_tasks": 1, + "total_videos": 4056, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "c86fb8b5-8845-4798-9c83-f3a7aec176f7", + "dataset_uuid": "6d8ca1f6-f9cf-4b0c-bef5-fca799793086", "language": [ "en", "zh" @@ -129332,10 +139113,8 @@ "robotics" ], "sub_tasks": [ - "abnormal", - "Pick up the bread", - "Move the basket to the center of view", - "place the bread into the basket", + "Grab and lift both the mouse and power cord from the accessory packaging area at the same time", + "Place the mouse and power cord into the box", "null" ], "annotations": { @@ -129373,10 +139152,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu 
Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "Split_aloha_basket_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "Split_aloha_basket_storage_bread_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AgiBot-g1_storage_item_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AgiBot-g1_storage_item_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_storage_potato_left": { + "Airbot_MMK2_doodled_line": { "task_categories": [ "robotics" ], @@ -129406,11 +139185,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_potato_left", + "dataset_name": "Airbot_MMK2_doodled_line", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "kitchen", + "level1": "other", + "level2": "laboratory", "level3": null, "level4": null, "level5": null @@ -129418,17 +139197,17 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "potato", - "level1": "vegetables", - "level2": "potato", + "object_name": "ballpoint_pen", + "level1": "stationery", + "level2": "ballpoint_pen", "level3": null, 
"level4": null, "level5": null }, { - "object_name": "storage_box", - "level1": "storage_utensils", - "level2": "storage_box", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", "level3": null, "level4": null, "level5": null @@ -129436,35 +139215,35 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the potato with left hand and put it in the storage box." + "Pick up the ballpoint pen and leave your handwriting on the paper." ], "sub_tasks": [ { - "subtask": "Grasp the potato with the right gripper", + "subtask": "Close the pen switch with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the potato with the left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Lift the pen with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the potato into the left compartment of the storage box with the left gripper", + "subtask": "Grasp the pen with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the pen on the table with the right gripper", "subtask_index": 4 }, { - "subtask": "Static", + "subtask": "Write on paper with a pen with right gripper", "subtask_index": 5 }, { - "subtask": "Place the potato into the right compartment of the storage box with the right gripper", + "subtask": "Open the pen switch with the right gripper", "subtask_index": 6 }, { @@ -129475,7 +139254,9 @@ "atomic_actions": [ "grasp", "pick", - "place" + "place", + "pressbutton", + "write" ], "robot_name": [ "Airbot_MMK2" @@ -129509,23 +139290,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 69, - "total_frames": 8728, + "total_episodes": 99, + "total_frames": 57693, "fps": 30, "total_tasks": 8, - "total_videos": 276, + "total_videos": 396, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, 
"camera_views": 4, - "dataset_size": "322.26 MB" + "dataset_size": "2.09 GB" }, - "frame_num": 8728, - "dataset_size": "322.26 MB", - "data_structure": "Airbot_MMK2_Airbot_MMK2_storage_potato_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (57 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 57693, + "dataset_size": "2.09 GB", + "data_structure": "Airbot_MMK2_doodled_line_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:68" + "train": "0:98" }, "features": { "observation.images.cam_head_rgb": { @@ -129879,117 +139660,6 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "AgiBot-g1_storage_item_e": { - "path": "AgiBot-g1_storage_item_e", - "dataset_name": "storage_item_e", - "robot_type": "", - "end_effector_type": [ - "two_finger_gripper" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "place", - "pick" - ], - "tasks": "Grab and lift both the mouse and power cord from the accessory packaging area at the same time", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "data_cable", - "level1": "tool", - "level2": "data_cable", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "box", - "level1": "container", - "level2": "box", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "mouse", - "level1": "tool", - "level2": "mouse", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": null, - "frame_range": "0-212067", - "dataset_size": "92.8GB", - "statistics": { - "total_episodes": 507, - "total_frames": 212067, - "total_tasks": 1, - "total_videos": 4056, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "6d8ca1f6-f9cf-4b0c-bef5-fca799793086", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grab and lift both the mouse and power 
cord from the accessory packaging area at the same time", - "Place the mouse and power cord into the box", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "AgiBot-g1_storage_item_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── 
episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AgiBot-g1_storage_item_e_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_back_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_back_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_center_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_left_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_high_right_fisheye_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" - }, "Airbot_MMK2_storage_electronics_white_basket": { "task_categories": [ "robotics" @@ -142325,26 +151995,590 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] - }, - "gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "dtype": "float32", - "shape": [ - 2 - ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + 
"license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, + "Galaxea_R1_Lite_fold_towel_twice": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": 
"data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_fold_towel_twice", + "dataset_uuid": "eb818363-8a30-492c-8639-78589399913b", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "gray_square_towel", + "level1": "daily_necessities", + "level2": "gray_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_square_towel", + "level1": "daily_necessities", + "level2": "towels", + "level3": "yellow_square_towel", + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "Use the gripper to fold the yellow towel and grey towel in half twice,then put the yellow towel on the grey towel." 
+ ], + "sub_tasks": [ + { + "subtask": "Press the gray towel with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Fold the yellow towel from left to right with left gripper", + "subtask_index": 1 + }, + { + "subtask": "Press the gray towel with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Fold the yellow towel upwards with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Fold the gray towel from left to right with left gripper", + "subtask_index": 4 + }, + { + "subtask": "Press the yellow towel with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Fold the gray towel from right to left with right gripper", + "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "Place the yellow towel on the gray towel with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Fold the yellow towel upwards with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the grey towel on the yellow towel with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the grey towel on the yellow towel with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Fold the gray towel upwards with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Press the yellow towel with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Move the position of the yellow towel with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the yellow towel on the gray towel with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Fold the gray towel upwards with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Fold the yellow towel from right to left with right gripper", + "subtask_index": 17 + }, + { + "subtask": "null", + "subtask_index": 18 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "fold" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": 
"two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 46, + "total_frames": 50022, + "fps": 30, + "total_tasks": 19, + "total_videos": 184, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "2.14 GB" + }, + "frame_num": 50022, + "dataset_size": "2.14 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_fold_towel_twice_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- 
episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (34 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:45" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 360, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 360, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 360, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 360, + "video.width": 640, + "video.codec": "av1", 
+ "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + 
"left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" } }, 
"authors": { @@ -142366,9 +152600,9 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" }, - "Galaxea_R1_Lite_fold_towel_twice": { + "Airbot_MMK2_close_door_left": { "task_categories": [ "robotics" ], @@ -142398,8 +152632,8 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Galaxea_R1_Lite_fold_towel_twice", - "dataset_uuid": "eb818363-8a30-492c-8639-78589399913b", + "dataset_name": "Airbot_MMK2_close_door_left", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", "level2": "bedroom", @@ -142410,126 +152644,55 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "gray_square_towel", - "level1": "daily_necessities", - "level2": "gray_square_towel", + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", "level3": null, "level4": null, "level5": null - }, - { - "object_name": "yellow_square_towel", - "level1": "daily_necessities", - "level2": "towels", - "level3": "yellow_square_towel", - "level4": null, - "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "Use the gripper to fold the yellow towel and grey towel in half twice,then put the yellow towel on the grey towel." + "close the cabinet door with your left hand." 
], "sub_tasks": [ { - "subtask": "Press the gray towel with the left gripper", + "subtask": "Touch the door with the left gripper", "subtask_index": 0 }, { - "subtask": "Fold the yellow towel from left to right with left gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Press the gray towel with the right gripper", + "subtask": "Close the cupboard door with the left gripper", "subtask_index": 2 }, - { - "subtask": "Fold the yellow towel upwards with the right gripper", - "subtask_index": 3 - }, - { - "subtask": "Fold the gray towel from left to right with left gripper", - "subtask_index": 4 - }, - { - "subtask": "Press the yellow towel with the right gripper", - "subtask_index": 5 - }, - { - "subtask": "Fold the gray towel from right to left with right gripper", - "subtask_index": 6 - }, - { - "subtask": "End", - "subtask_index": 7 - }, - { - "subtask": "Place the yellow towel on the gray towel with the left gripper", - "subtask_index": 8 - }, - { - "subtask": "Fold the yellow towel upwards with the left gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the grey towel on the yellow towel with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Place the grey towel on the yellow towel with the right gripper", - "subtask_index": 11 - }, - { - "subtask": "Fold the gray towel upwards with the right gripper", - "subtask_index": 12 - }, - { - "subtask": "Press the yellow towel with the left gripper", - "subtask_index": 13 - }, - { - "subtask": "Move the position of the yellow towel with the left gripper", - "subtask_index": 14 - }, - { - "subtask": "Place the yellow towel on the gray towel with the right gripper", - "subtask_index": 15 - }, - { - "subtask": "Fold the gray towel upwards with the left gripper", - "subtask_index": 16 - }, - { - "subtask": "Fold the yellow towel from right to left with right gripper", - "subtask_index": 17 - }, { "subtask": "null", - "subtask_index": 18 + "subtask_index": 3 } ], "atomic_actions": [ - 
"grasp", - "pick", - "place", - "fold" + "push" ], "robot_name": [ - "Galaxea_R1_Lite" + "Airbot_MMK2" ], - "end_effector_type": "two_finger_gripper", + "end_effector_type": "five_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ - "cam_head_left_rgb", - "cam_head_right_rgb", + "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb" + "cam_right_wrist_rgb", + "cam_front_rgb" ], "came_info": { - "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", - "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", @@ -142546,30 +152709,30 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 46, - "total_frames": 50022, + "total_episodes": 50, + "total_frames": 5322, "fps": 30, - "total_tasks": 19, - "total_videos": 184, + "total_tasks": 4, + "total_videos": 200, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 14, - "action_dim": 14, + "state_dim": 36, + "action_dim": 36, "camera_views": 4, - "dataset_size": "2.14 GB" + "dataset_size": "161.77 MB" }, - "frame_num": 50022, - "dataset_size": "2.14 GB", - "data_structure": 
"Galaxea_R1_Lite_Galaxea_R1_Lite_fold_towel_twice_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (34 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 5322, + "dataset_size": "161.77 MB", + "data_structure": "Airbot_MMK2_close_door_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:45" + "train": "0:49" }, "features": { - "observation.images.cam_head_left_rgb": { + "observation.images.cam_head_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -142578,8 +152741,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -142588,11 +152751,11 @@ "has_audio": false } }, - "observation.images.cam_head_right_rgb": { + "observation.images.cam_left_wrist_rgb": { "dtype": "video", "shape": [ - 720, - 1280, + 480, + 640, 3 ], "names": [ @@ -142601,8 +152764,8 @@ "channels" ], "info": { - "video.height": 720, - "video.width": 1280, + "video.height": 480, + "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", "video.is_depth_map": false, @@ -142611,10 +152774,10 @@ "has_audio": false } }, - "observation.images.cam_left_wrist_rgb": { + "observation.images.cam_right_wrist_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -142624,7 +152787,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -142634,10 +152797,10 @@ "has_audio": false } }, - "observation.images.cam_right_wrist_rgb": { + "observation.images.cam_front_rgb": { "dtype": "video", "shape": [ - 360, + 480, 640, 3 ], @@ -142647,7 +152810,7 @@ "channels" ], "info": { - "video.height": 360, + "video.height": 480, "video.width": 640, "video.codec": "av1", "video.pix_fmt": "yuv420p", @@ -142660,7 +152823,7 @@ "observation.state": { "dtype": "float32", "shape": [ - 14 + 36 ], 
"names": [ "left_arm_joint_1_rad", @@ -142675,14 +152838,36 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "action": { "dtype": "float32", "shape": [ - 14 + 36 ], "names": [ "left_arm_joint_1_rad", @@ -142697,8 +152882,30 @@ "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_gripper_open", - "right_gripper_open" + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" ] }, "timestamp": { @@ -142849,66 +153056,6 @@ 2 ], "dtype": "int32" - }, - "gripper_open_scale_state": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - 
"gripper_open_scale_action": { - "names": [ - "left_gripper_open_scale", - "right_gripper_open_scale" - ], - "shape": [ - 2 - ], - "dtype": "float32" - }, - "gripper_mode_state": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_mode_action": { - "names": [ - "left_gripper_mode", - "right_gripper_mode" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_state": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" - }, - "gripper_activity_action": { - "names": [ - "left_gripper_activity", - "right_gripper_activity" - ], - "shape": [ - 2 - ], - "dtype": "int32" } }, "authors": { @@ -142930,7 +153077,7 @@ "version_info": "Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, "R1_Lite_tea_service_table_setting": { "path": "R1_Lite_tea_service_table_setting", @@ -153657,7 +163804,632 @@ "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "8dfd4c19-842d-4dce-8253-ffaf5f56d7df", + "dataset_uuid": "8dfd4c19-842d-4dce-8253-ffaf5f56d7df", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": [ + "Place the cup on the table", + "Place the comb on the table with the right gripper", + "Place the bathroom toiletry into the drawer", + "Pick up the cup", + "Pick up the bathroom toiletry", + "Open the drawer with the left gripper", + "Grasp the bathroom toiletry with the left gripper", + "Put toiletries in the drawer", + "Grasp the black cup with the left gripper", + "Place the cup on the storage box", + "Close the drawer", + "Open the drawer", + "Place the black cup on the drawer with the left gripper", + 
"null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai 
Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "R1_Lite_tidy_up_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "R1_Lite_tidy_up_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n 
│ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_move_block_gold_bar_models": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_move_block_gold_bar_models", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "early_education_toys", + "level1": "toys", + "level2": "early_education_toys", + "level3": null, + "level4": 
null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "place the glasses case in the middle of the table with your left hand and put the toy gold bar on the glasses case with your right hand." + ], + "sub_tasks": [ + { + "subtask": "Place the gold bar on the glasses case with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the glasses case with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Abnormal", + "subtask_index": 2 + }, + { + "subtask": "Static", + "subtask_index": 3 + }, + { + "subtask": "Place the glasses case on the middle of the table with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Grasp the gold bar the right gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + 
"eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 10703, + "fps": 30, + "total_tasks": 8, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "434.03 MB" + }, + "frame_num": 10703, + "dataset_size": "434.03 MB", + "data_structure": "Airbot_MMK2_move_block_gold_bar_models_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "AIRBOT_MMK2_place_cookies_and_beer": { + "path": "AIRBOT_MMK2_place_cookies_and_beer", + "dataset_name": "place_cookies_and_beer", + "robot_type": "", + "end_effector_type": [ + "five_finger_hand" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "tasks": "Grasp the bagged cookies with the left gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "biscuit", + "level1": "food", + "level2": "biscuit", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "lid", + "level1": "daily_necessities", + "level2": "lid", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beer_mug", + "level1": "container", + "level2": "beer_mug", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-10519", + "dataset_size": "515.1MB", + "statistics": { + "total_episodes": 50, + "total_frames": 10519, + "total_tasks": 1, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "a380537f-44a3-457c-8113-67604aec65a7", "language": [ "en", "zh" @@ -153666,19 +164438,11 @@ "robotics" ], "sub_tasks": [ - "Place the cup on the table", - "Place the comb on the table with the right gripper", - "Place the bathroom toiletry into the drawer", - "Pick up the cup", - "Pick up the bathroom toiletry", - "Open the drawer with the left gripper", - "Grasp the bathroom 
toiletry with the left gripper", - "Put toiletries in the drawer", - "Grasp the black cup with the left gripper", - "Place the cup on the storage box", - "Close the drawer", - "Open the drawer", - "Place the black cup on the drawer with the left gripper", + "Grasp the bagged cookies with the left gripper", + "End", + "Grasp the beer mug with the right gripper", + "Place the bagged cookies on the white basket with the left gripper", + "Place the beer mug on the white basket with the right gripper", "null" ], "annotations": { @@ -153716,10 +164480,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "R1_Lite_tidy_up_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "R1_Lite_tidy_up_toiletries_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "AIRBOT_MMK2_place_cookies_and_beer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "AIRBOT_MMK2_place_cookies_and_beer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── 
episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_move_block_gold_bar_models": { + "Agilex_Cobot_Magic_move_object_red_tablecloth": { "task_categories": [ "robotics" ], @@ -153749,11 +164513,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_move_block_gold_bar_models", + "dataset_name": "Agilex_Cobot_Magic_move_object_red_tablecloth", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "living_room", + "level1": "commercial & convenience", + "level2": "supermarket", "level3": null, "level4": null, "level5": null @@ -153761,17 +164525,241 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "eyeglass_case", + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_table_cloths", "level1": "laboratory_supplies", - "level2": "eyeglass_case", + "level2": "red_table_cloths", "level3": null, "level4": null, 
"level5": null }, { - "object_name": "early_education_toys", + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": "Fruit cake", + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beef_cheeseburger", + "level1": "food", + "level2": "beef_cheeseburger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + 
"level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pan", + "level1": "kitchen_supplies", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teapot", + "level1": "kitchen_supplies", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teacup", + "level1": "kitchen_supplies", + "level2": "small_teacup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_ball", + "level1": "trash", + "level2": "paper_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_square_towel", + "level1": "daily_necessities", + "level2": "brown_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_cylindrical_pen_holder", + "level1": "stationery", + "level2": "black_cylindrical_pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_long_towel", + "level1": "daily_necessities", + "level2": "pink_long_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", "level1": "toys", - "level2": "early_education_toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "blue_long_towel", + "level1": "daily_necessities", + "level2": "blue_long_towel", "level3": null, "level4": null, "level5": null @@ -153779,69 +164767,523 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "place the glasses case in the middle of the table with your left hand and put the toy gold bar on the glasses case with your right hand." + "the gripper move the object." ], "sub_tasks": [ { - "subtask": "Place the gold bar on the glasses case with the right gripper", + "subtask": "Grasp the pink towel with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the glasses case with the left gripper", + "subtask": "Place the XX on the table with the left gripper", "subtask_index": 1 }, { - "subtask": "Abnormal", + "subtask": "Place the mint candy on the table with the left gripper", "subtask_index": 2 }, { - "subtask": "Static", + "subtask": "Place the snickers on the table with the right gripper", "subtask_index": 3 }, { - "subtask": "Place the glasses case on the middle of the table with the left gripper", + "subtask": "Grasp the pen container with the right gripper", "subtask_index": 4 }, { - "subtask": "End", + "subtask": "Grasp the grey towel with the left gripper", "subtask_index": 5 }, { - "subtask": "Grasp the gold bar the right gripper", + "subtask": "Place the eyeglass case on the table with the left gripper", "subtask_index": 6 }, { - "subtask": "null", + "subtask": "Grasp the white duck with the left gripper", "subtask_index": 7 + }, + { + "subtask": "Place the eggplant on the table with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the blue bowl on the table with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Grasp the banana with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the compasses on the table with the right gripper", + "subtask_index": 11 + }, + { + 
"subtask": "Place the sandwich on the table with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the pink cake on the table with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the banana on the table with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the orange with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the peach on the table with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Place the green lemon on the table with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the mint candy with the left gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the sandwich with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the eyeglass case with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Place the compasses on the table with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the eyeglass case on the table with the right gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the square chewing gum with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Place the brown towel on the table with the left gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the blue bowl on the table with the left gripper", + "subtask_index": 27 + }, + { + "subtask": "Grasp the sandwich biscuit with the right gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the white blackboard erasure on the table with the left gripper", + "subtask_index": 29 + }, + { + "subtask": "Grasp the white blackboard erasure with the left gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the snickers with the right gripper", + "subtask_index": 31 + }, + { + "subtask": "Grasp the eyeglass 
case with the right gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the eggplant on the table with the left gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the mango on the table with the right gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the hard facial cleanser on the table with the right gripper", + "subtask_index": 35 + }, + { + "subtask": "Place the mint candy on the table with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 37 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 38 + }, + { + "subtask": "Grasp the chocolate with the right gripper", + "subtask_index": 39 + }, + { + "subtask": "Grasp the mangosteen with the left gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the peach with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 42 + }, + { + "subtask": "Grasp the mango with the left gripper", + "subtask_index": 43 + }, + { + "subtask": "Place the snickers on the table with the left gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the snickers with the left gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the lemon with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Place the teapot on the table with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 48 + }, + { + "subtask": "Grasp the sandwich with the left gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the white duck on the table with the left gripper", + "subtask_index": 50 + }, + { + "subtask": "Place the white duck on the table with the right gripper", + "subtask_index": 51 + }, + { + "subtask": "Grasp the teapot with the right gripper", + "subtask_index": 52 + }, + { + "subtask": "Place the pink 
towel on the table with the left gripper", + "subtask_index": 53 + }, + { + "subtask": "Grasp the hollow ring bread with the right gripper", + "subtask_index": 54 + }, + { + "subtask": "Grasp the hollow ring bread with the right gripper\n", + "subtask_index": 55 + }, + { + "subtask": "Grasp the blue bowl with the left gripper", + "subtask_index": 56 + }, + { + "subtask": "Place the waffle on the table with the right gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the pen container on the table with the right gripper", + "subtask_index": 58 + }, + { + "subtask": "Place the mangosteen on the table with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Grasp the eggplant with the right gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the pink cake with the left gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the gray towel with the right gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the pink towel on the table with the right gripper", + "subtask_index": 63 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the green lemon on the table with the left gripper", + "subtask_index": 65 + }, + { + "subtask": "Grasp the eggplant with the left gripper", + "subtask_index": 66 + }, + { + "subtask": "Place the hard facial cleanser on the table with the left gripper", + "subtask_index": 67 + }, + { + "subtask": "Place the lemon on the table with the left gripper", + "subtask_index": 68 + }, + { + "subtask": "End", + "subtask_index": 69 + }, + { + "subtask": "Grasp the white duck with the right gripper", + "subtask_index": 70 + }, + { + "subtask": "Grasp the white blackboard erasure with the right gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the blue blackboard erasure with the left gripper", + "subtask_index": 72 + }, + { + "subtask": "Place the sandwich biscuit on the table with the right gripper", + "subtask_index": 73 + }, + { 
+ "subtask": "Place the sandwich on the table with the right gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the fruit candy on the table with the right gripper", + "subtask_index": 75 + }, + { + "subtask": "Place the gray towel on the table with the right gripper", + "subtask_index": 76 + }, + { + "subtask": "Place the blue cup on the table with the right gripper", + "subtask_index": 77 + }, + { + "subtask": "Place the pen container on the table with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 79 + }, + { + "subtask": "Grasp the waffle with the right gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the blue bowl with the right gripper", + "subtask_index": 81 + }, + { + "subtask": "Place the pink cake on the table with the left gripper", + "subtask_index": 82 + }, + { + "subtask": "Abnormal", + "subtask_index": 83 + }, + { + "subtask": "Place the square chewing gum on the table with the right gripper", + "subtask_index": 84 + }, + { + "subtask": "Place the blue blackboard erasure on the table with the left gripper", + "subtask_index": 85 + }, + { + "subtask": "Grasp the pink towel with the left gripper", + "subtask_index": 86 + }, + { + "subtask": "Grasp the mangosteen with the right gripper", + "subtask_index": 87 + }, + { + "subtask": "Place the grey towel on the table with the left gripper", + "subtask_index": 88 + }, + { + "subtask": "Grasp the orange with the left gripper", + "subtask_index": 89 + }, + { + "subtask": "Place the hollow ring bread on the table with the right gripper", + "subtask_index": 90 + }, + { + "subtask": "Place the blue cup on the table with the left gripper", + "subtask_index": 91 + }, + { + "subtask": "Place the orange on the table with the right gripper", + "subtask_index": 92 + }, + { + "subtask": "Place the teapot on the table with the right gripper", + "subtask_index": 93 + }, + { + "subtask": "Place the mangosteen on the 
table with the left gripper", + "subtask_index": 94 + }, + { + "subtask": "Grasp the fruit candy with the right gripper", + "subtask_index": 95 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 96 + }, + { + "subtask": "Place the white blackboard erasure on the table with the right gripper", + "subtask_index": 97 + }, + { + "subtask": "Grasp the pink cake with the right gripper", + "subtask_index": 98 + }, + { + "subtask": "Place the mango on the table with the left gripper", + "subtask_index": 99 + }, + { + "subtask": "Place the chocolate on the table with the left gripper", + "subtask_index": 100 + }, + { + "subtask": "Place the hollow ring bread on the table with the right gripper\n", + "subtask_index": 101 + }, + { + "subtask": "Grasp the blue blackboard erasure with the right gripper", + "subtask_index": 102 + }, + { + "subtask": "Place the banana on the table with the left gripper", + "subtask_index": 103 + }, + { + "subtask": "Place the hard facial cleanser on the table with the right gripper\n", + "subtask_index": 104 + }, + { + "subtask": "Place the blue blackboard erasure on the table with the right gripper", + "subtask_index": 105 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 106 + }, + { + "subtask": "Grasp the mango with the right gripper", + "subtask_index": 107 + }, + { + "subtask": "Place the orange on the table with the left gripper", + "subtask_index": 108 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 109 + }, + { + "subtask": "Place the chocolate on the table with the right gripper", + "subtask_index": 110 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 111 + }, + { + "subtask": "Grasp the mint candy with the right gripper", + "subtask_index": 112 + }, + { + "subtask": "Place the brown towel on the table with the right gripper", + "subtask_index": 113 + }, + { + "subtask": "Grasp the pen 
container with the left gripper", + "subtask_index": 114 + }, + { + "subtask": "Grasp the green lemon with the left gripper", + "subtask_index": 115 + }, + { + "subtask": "Grasp the tea cup with the left gripper", + "subtask_index": 116 + }, + { + "subtask": "Place the tea cup on the table with the left gripper", + "subtask_index": 117 + }, + { + "subtask": "Place the square chewing gum on the table with the left gripper", + "subtask_index": 118 + }, + { + "subtask": "Grasp the teapot with the left gripper", + "subtask_index": 119 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 120 + }, + { + "subtask": "null", + "subtask_index": 121 } ], "atomic_actions": [ - "grasp", - "pick", - "place" + "grasp", + "lift", + "lower" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", 
"eef_direction_annotation.jsonl", @@ -153852,23 +165294,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 10703, + "total_episodes": 198, + "total_frames": 100817, "fps": 30, - "total_tasks": 8, - "total_videos": 200, + "total_tasks": 122, + "total_videos": 594, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "434.03 MB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "3.23 GB" }, - "frame_num": 10703, - "dataset_size": "434.03 MB", - "data_structure": "Airbot_MMK2_move_block_gold_bar_models_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 100817, + "dataset_size": "3.23 GB", + "data_structure": "Agilex_Cobot_Magic_move_object_red_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(186 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:197" }, "features": { "observation.images.cam_head_rgb": { @@ -153940,33 +165382,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -153975,42 +165394,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -154019,36 +165428,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -154088,17 +165487,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -154115,10 +165514,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": 
"float32" + ] }, "eef_sim_pose_action": { "names": [ @@ -154135,70 +165534,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -154220,121 +165679,7 @@ "version_info": 
"Initial Release", "data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "AIRBOT_MMK2_place_cookies_and_beer": { - "path": "AIRBOT_MMK2_place_cookies_and_beer", - "dataset_name": "place_cookies_and_beer", - "robot_type": "", - "end_effector_type": [ - "five_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the bagged cookies with the left gripper", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "biscuit", - "level1": "food", - "level2": "biscuit", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "lid", - "level1": "daily_necessities", - "level2": "lid", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "beer_mug", - "level1": "container", - "level2": "beer_mug", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-10519", - "dataset_size": "515.1MB", - "statistics": { - "total_episodes": 50, - "total_frames": 10519, - "total_tasks": 1, - "total_videos": 200, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "a380537f-44a3-457c-8113-67604aec65a7", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the bagged cookies with the left gripper", - "End", - "Grasp the beer mug with the right gripper", - "Place the bagged cookies on the white basket with the left gripper", - "Place the beer mug on the white basket with the right gripper", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": 
"auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": 
"AIRBOT_MMK2_place_cookies_and_beer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "AIRBOT_MMK2_place_cookies_and_beer_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, "AgiBot-g1_storage_item_b": { "path": "AgiBot-g1_storage_item_b", @@ -172300,6 +183645,502 @@ "data_schema": "AIRBOT_MMK2_storage_toy_cars_and_cookies_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── 
episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_storage_toy_cars_and_cookies_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Airbot_MMK2_cut_scallion": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the 
associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_cut_scallion", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "kitchen_knife", + "level1": "kitchen_supplies", + "level2": "kitchen_knife", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "garlic", + "level1": "vegetables", + "level2": "garlic", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the kitchen knife with your hand and cut the vegetables." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the kitchen knife with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the kitchen knife back on the knife holder with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "Cut scallions with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Press the scallion with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "cut" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 97, + "total_frames": 33460, + "fps": 30, + "total_tasks": 6, + "total_videos": 388, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + 
"dataset_size": "1.17 GB" + }, + "frame_num": 33460, + "dataset_size": "1.17 GB", + "data_structure": "Airbot_MMK2_cut_scallion_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:96" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, 
+ 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + 
"right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": 
[ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, 
Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "leju_robot_box_storage_parcel_c": { "path": "leju_robot_box_storage_parcel_c", "dataset_name": "box_storage_parcel_c", @@ -175773,6 +187614,646 @@ "data_schema": "leju_robot_hotel_services_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "leju_robot_hotel_services_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "agilex_cobot_magic_pass_object_left_to_right_white_tablecloth": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", 
+ "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "agilex_cobot_magic_pass_object_left_to_right_white_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." 
+ ], + "sub_tasks": [ + { + "subtask": "Unlabeled", + "subtask_index": 0 + }, + { + "subtask": "Grasp the Rubik's Cube with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Pass the Rubik's Cube to the right gripper", + "subtask_index": 2 + }, + { + "subtask": "End", + "subtask_index": 3 + }, + { + "subtask": "Place the Rubik's Cube on the table with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "handover", + "takeover" + ], + "robot_name": [ + "agilex_cobot_magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 96, + "total_frames": 55704, + "fps": 30, + "total_tasks": 6, + "total_videos": 288, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "546.67 MB" + }, + "frame_num": 55704, + "dataset_size": "546.67 MB", + "data_structure": 
"Agilex_Cobot_Magic_pass_object_left_to_right_white_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (84 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:95" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + 
"info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + 
"index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + 
"names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, 
Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Galaxea_R1_Lite_storage_object_white_box": { "task_categories": [ "robotics" diff --git a/info/data_index.json b/info/data_index.json index 64f6b2a396065f2a1866facb21b58a570ecc6714..abe8e488334691e627fed9f4e859d3a657b15732 100644 --- a/info/data_index.json +++ b/info/data_index.json @@ -112,9 +112,11 @@ "Agilex_Cobot_Magic_erase_board", "Agilex_Cobot_Magic_erase_board_left", "Agilex_Cobot_Magic_erase_board_left_side", + "Agilex_Cobot_Magic_erase_board_passing_left_to_right", "Agilex_Cobot_Magic_erase_board_passing_right_to_left", "Agilex_Cobot_Magic_erase_board_right", "Agilex_Cobot_Magic_fold_jeans_shorts_children's", + "Agilex_Cobot_Magic_fold_short_sleeve_black", "Agilex_Cobot_Magic_fold_shorts_khaki", "Agilex_Cobot_Magic_fold_towel", "Agilex_Cobot_Magic_fold_towel_blue_tray", @@ -129,6 +131,8 @@ "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth", "Agilex_Cobot_Magic_move_object_beige_tablecloth", 
"Agilex_Cobot_Magic_move_object_black_tablecloth", + "Agilex_Cobot_Magic_move_object_green_tablecloth", + "Agilex_Cobot_Magic_move_object_red_tablecloth", "Agilex_Cobot_Magic_move_pencil_sharpener", "Agilex_Cobot_Magic_open_drawer_bottom", "Agilex_Cobot_Magic_organize_test_tube", @@ -140,22 +144,28 @@ "Agilex_Cobot_Magic_storage_object_closest", "Agilex_Cobot_Magic_storage_object_closest_apple", "Agilex_Cobot_Magic_storage_object_left", + "Agilex_Cobot_Magic_storage_object_red_tablecloth", "Agilex_Cobot_Magic_storage_orange_basket_left", "Agilex_Cobot_Magic_storage_orange_basket_right", "Agilex_Cobot_Magic_storage_orange_white_bag", + "Agilex_Cobot_Magic_storage_peach_brown_bag", "Agilex_Cobot_Magic_storage_peach_left", "Agilex_Cobot_Magic_storage_peach_right", "Airbot_MMK2_click_pen", + "Airbot_MMK2_close_door_left", "Airbot_MMK2_close_door_right", "Airbot_MMK2_close_doors", "Airbot_MMK2_close_drawer", "Airbot_MMK2_close_lid", "Airbot_MMK2_cover_lid", + "Airbot_MMK2_cut_scallion", "Airbot_MMK2_dial_number", + "Airbot_MMK2_doodled_line", "Airbot_MMK2_move_apple_orange_pomegranate", "Airbot_MMK2_move_block", "Airbot_MMK2_move_block_both_hands", "Airbot_MMK2_move_block_gold_bar_models", + "Airbot_MMK2_move_block_twice", "Airbot_MMK2_move_block_wet_wipes", "Airbot_MMK2_move_book_front", "Airbot_MMK2_move_book_right_side", @@ -166,6 +176,7 @@ "Airbot_MMK2_move_fake_food", "Airbot_MMK2_move_medicine_bottle", "Airbot_MMK2_move_pan", + "Airbot_MMK2_move_paper_box", "Airbot_MMK2_move_phone_twice", "Airbot_MMK2_move_sword_doll", "Airbot_MMK2_move_tennis_racket_ball", @@ -175,6 +186,7 @@ "Airbot_MMK2_open_door_right", "Airbot_MMK2_open_laptop", "Airbot_MMK2_open_lid", + "Airbot_MMK2_organize_plate", "Airbot_MMK2_pass_paper_box", "Airbot_MMK2_pick_up_and_place_tub", "Airbot_MMK2_play_guitar", @@ -195,6 +207,7 @@ "Airbot_MMK2_stack_block", "Airbot_MMK2_stack_bowl", "Airbot_MMK2_stack_cubic_block", + "Airbot_MMK2_stack_cup", "Airbot_MMK2_storage_and_take_cake_plate", 
"Airbot_MMK2_storage_apple_orange", "Airbot_MMK2_storage_badminton", @@ -204,6 +217,7 @@ "Airbot_MMK2_storage_block_BBs", "Airbot_MMK2_storage_block_both_hands", "Airbot_MMK2_storage_block_tape_measure", + "Airbot_MMK2_storage_book", "Airbot_MMK2_storage_bottle_part", "Airbot_MMK2_storage_bowl", "Airbot_MMK2_storage_bowl_wet_wipes", @@ -223,6 +237,7 @@ "Airbot_MMK2_storage_diamond_ring", "Airbot_MMK2_storage_egg_bowl", "Airbot_MMK2_storage_egg_plate", + "Airbot_MMK2_storage_egg_white_box", "Airbot_MMK2_storage_egg_yellow_box", "Airbot_MMK2_storage_electronics_white_basket", "Airbot_MMK2_storage_electronics_yellow_baket", @@ -232,7 +247,9 @@ "Airbot_MMK2_storage_ice_cream", "Airbot_MMK2_storage_lemon_mango", "Airbot_MMK2_storage_mango_pomegranate", + "Airbot_MMK2_storage_milk_tissue", "Airbot_MMK2_storage_network_cable_paper_box", + "Airbot_MMK2_storage_onion_sweet_potato", "Airbot_MMK2_storage_paper_box_sponge", "Airbot_MMK2_storage_peach_pear", "Airbot_MMK2_storage_penguin_doll_tiger_doll", @@ -277,6 +294,7 @@ "Airbot_MMK2_take_toy_car", "Airbot_MMK2_turn_page", "Airbot_MMK2_unplug", + "Airbot_MMK2_unscrew_bottle_cap", "Cobot_Magic_box_storage_chopsticks", "Cobot_Magic_cap_the_pen_a", "Cobot_Magic_catch_the_ball", @@ -410,6 +428,7 @@ "Galaxea_R1_Lite_mix_red_yellow_right", "Galaxea_R1_Lite_move_mouse", "Galaxea_R1_Lite_pour_liquid_mrable_bar_counter", + "Galaxea_R1_Lite_pour_powder_marble_bar_counter", "Galaxea_R1_Lite_pour_solid", "Galaxea_R1_Lite_pour_solid_marble_bar_counter", "Galaxea_R1_Lite_pour_water", @@ -566,6 +585,7 @@ "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth", "agilex_cobot_magic_pass_object_left_to_right_green_tablecloth", "agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth", + "agilex_cobot_magic_pass_object_left_to_right_white_tablecloth", "agilex_cobot_magic_pass_object_right_to_left_black_tablecloth", "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth", 
"agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth", @@ -631,5 +651,5 @@ "leju_robot_pass_the_cleaner_d", "leju_robot_pass_the_cleaner_e" ], - "count": 630 + "count": 650 } \ No newline at end of file diff --git a/thumbnails/Agilex_Cobot_Magic_erase_board_passing_left_to_right.jpg b/thumbnails/Agilex_Cobot_Magic_erase_board_passing_left_to_right.jpg new file mode 100644 index 0000000000000000000000000000000000000000..640b4a57afc9c8d22334f66163a466b6dfc72995 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_erase_board_passing_left_to_right.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9212a90351e6afc893778d2f9e2c0244e2ddd760d8a32e2f494703388df065e +size 12948 diff --git a/thumbnails/Agilex_Cobot_Magic_fold_short_sleeve_black.jpg b/thumbnails/Agilex_Cobot_Magic_fold_short_sleeve_black.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d68beb618bae23e5c065b75a0c1b13a9749fb4a7 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_fold_short_sleeve_black.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:807f266b0e785b46a70245f2592ee62dcabe39cc4e2aa976530d5f0e52beeead +size 16467 diff --git a/thumbnails/Agilex_Cobot_Magic_move_object_green_tablecloth.jpg b/thumbnails/Agilex_Cobot_Magic_move_object_green_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..077233453e9047f899ce0636d3517f419cc6c594 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_move_object_green_tablecloth.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f3b78a9b91e388a8d9d60c49e29fc168d8885689744c2d2ef8e3f0c253d767b +size 44632 diff --git a/thumbnails/Agilex_Cobot_Magic_move_object_red_tablecloth.jpg b/thumbnails/Agilex_Cobot_Magic_move_object_red_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cdd3665211b66bceb61d69d19de8157543c1c40b --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_move_object_red_tablecloth.jpg @@ -0,0 
+1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e994cb25d9a0ba6314ca012153b920130ff515d178f7d93633916da54071eb2b +size 24430 diff --git a/thumbnails/Agilex_Cobot_Magic_storage_object_red_tablecloth.jpg b/thumbnails/Agilex_Cobot_Magic_storage_object_red_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b75808fb1e3902f4abd86f72ece4ca15d730faef --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_storage_object_red_tablecloth.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28c758171a015cbdc5ec75ab74095dec549e00ae24e9d3f12c3d502ab87e9bbb +size 29606 diff --git a/thumbnails/Agilex_Cobot_Magic_storage_peach_brown_bag.jpg b/thumbnails/Agilex_Cobot_Magic_storage_peach_brown_bag.jpg new file mode 100644 index 0000000000000000000000000000000000000000..657d78471a9313e7aa52ad922f2880d106fad1c4 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_storage_peach_brown_bag.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a235ea282734f173902ecc6516d9cf4857e6c61e6dda73041e76a69902808cc3 +size 12700 diff --git a/thumbnails/Airbot_MMK2_close_door_left.jpg b/thumbnails/Airbot_MMK2_close_door_left.jpg new file mode 100644 index 0000000000000000000000000000000000000000..abe7441acd4f04d84bf763c85dfeed8edd5549b1 --- /dev/null +++ b/thumbnails/Airbot_MMK2_close_door_left.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e790f95387b183c48c6805ee611545bb99c54756e4da6986e81cdaa431b49ee +size 34814 diff --git a/thumbnails/Airbot_MMK2_cut_scallion.jpg b/thumbnails/Airbot_MMK2_cut_scallion.jpg new file mode 100644 index 0000000000000000000000000000000000000000..21d9b5b36f949ae18738f35edfa6e557577c3302 --- /dev/null +++ b/thumbnails/Airbot_MMK2_cut_scallion.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d72fddf331ad5c6a482f176ae5cb5729782070600d4fec42737bc19501e00be +size 29695 diff --git a/thumbnails/Airbot_MMK2_doodled_line.jpg 
b/thumbnails/Airbot_MMK2_doodled_line.jpg new file mode 100644 index 0000000000000000000000000000000000000000..362349673d70592a22b0dc6e4bf57e0c774bfdab --- /dev/null +++ b/thumbnails/Airbot_MMK2_doodled_line.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf408dd3004e1d6e3aed22ed329f45813e239153f77754032eea8a0d07a844f9 +size 32036 diff --git a/thumbnails/Airbot_MMK2_move_block_twice.jpg b/thumbnails/Airbot_MMK2_move_block_twice.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4d2785ebe440f9d422f6d77485ca06aba20a9a25 --- /dev/null +++ b/thumbnails/Airbot_MMK2_move_block_twice.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dfeb690aa3350072369b1146d1d22428282b1d414d32f0871fa6ca1f78713be9 +size 52748 diff --git a/thumbnails/Airbot_MMK2_move_paper_box.jpg b/thumbnails/Airbot_MMK2_move_paper_box.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a2f296d625277019841231eb5b41b9b9526928f7 --- /dev/null +++ b/thumbnails/Airbot_MMK2_move_paper_box.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12e699981adee3b7735116211abbf51156765bb3a803955c63edfc088a3ac7c2 +size 36876 diff --git a/thumbnails/Airbot_MMK2_organize_plate.jpg b/thumbnails/Airbot_MMK2_organize_plate.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8dc15e8e44a6df48ccce066a3e9e781bc5635c81 --- /dev/null +++ b/thumbnails/Airbot_MMK2_organize_plate.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:903c677301605ef9f81b03956b4c399c413a49daa6afee7992fd8ed037202e7f +size 34067 diff --git a/thumbnails/Airbot_MMK2_stack_cup.jpg b/thumbnails/Airbot_MMK2_stack_cup.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5df8720e214a46279e62b9ad5ab43b7ca591708c --- /dev/null +++ b/thumbnails/Airbot_MMK2_stack_cup.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:fe0163a713e03ce1996109e1960b3cbe71d4e22bf09664bcaeb20870e1503ffa +size 37576 diff --git a/thumbnails/Airbot_MMK2_storage_book.jpg b/thumbnails/Airbot_MMK2_storage_book.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5abd7eeab4e38b60f63dc69dbe0f94b573729afc --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_book.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c81821bbc79e5359f6ee70d6297a7d70d9ff30c299574048da354287c89a20cb +size 59155 diff --git a/thumbnails/Airbot_MMK2_storage_egg_white_box.jpg b/thumbnails/Airbot_MMK2_storage_egg_white_box.jpg new file mode 100644 index 0000000000000000000000000000000000000000..baaec003b6d33dd46822be242833a796b160338e --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_egg_white_box.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70b004686f008eb9d643566720202f4b34ba026dcd1099b90e449fa2381b1e4c +size 25024 diff --git a/thumbnails/Airbot_MMK2_storage_milk_tissue.jpg b/thumbnails/Airbot_MMK2_storage_milk_tissue.jpg new file mode 100644 index 0000000000000000000000000000000000000000..26b934626e619f911544dcf2d5adf744a6ea7c5b --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_milk_tissue.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c987f03633163456727877211bce4e9bf51925198ceed0cc614b32f820b0706 +size 28677 diff --git a/thumbnails/Airbot_MMK2_storage_onion_sweet_potato.jpg b/thumbnails/Airbot_MMK2_storage_onion_sweet_potato.jpg new file mode 100644 index 0000000000000000000000000000000000000000..eafbe6ce503a916b6ff7158ec1f9ff4ec6dfb1be --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_onion_sweet_potato.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44bf7a0a4c8c19ed3c75c4455b7b4f684f942cbf68a9d33d19a712623c4fbb67 +size 21721 diff --git a/thumbnails/Airbot_MMK2_unscrew_bottle_cap.jpg b/thumbnails/Airbot_MMK2_unscrew_bottle_cap.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..20bbc35924fc5552f48a8103f9172993b6adecdf --- /dev/null +++ b/thumbnails/Airbot_MMK2_unscrew_bottle_cap.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93ae3d3d8fad5320ed8240d9a2e7de0415a75c2b4d1ec375260bdc68ab3c6580 +size 44494 diff --git a/thumbnails/Galaxea_R1_Lite_pour_powder_marble_bar_counter.jpg b/thumbnails/Galaxea_R1_Lite_pour_powder_marble_bar_counter.jpg new file mode 100644 index 0000000000000000000000000000000000000000..af516b383a38e8f74e6e805058aff5e1e402024c --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_pour_powder_marble_bar_counter.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6c1a076c81587c40cabae7ef94a08c657f30ea2eb65595fca820ba4e84eb29f +size 35426 diff --git a/thumbnails/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.jpg b/thumbnails/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e36cfc6a295cd5099687fef21836505acc8fee9d --- /dev/null +++ b/thumbnails/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bde461dab936027ffcd8652662e555bc310638d7685f4a8514814fa7bcd77616 +size 13513 diff --git a/videos/Agilex_Cobot_Magic_erase_board_passing_left_to_right.mp4 b/videos/Agilex_Cobot_Magic_erase_board_passing_left_to_right.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..15072f79716e286a3d3dbfc9f8d2c4b7d2def579 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_erase_board_passing_left_to_right.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:528a2ea1f903bb0fcbda90802ad2150884bdcdec01f7df9789b89ac071a97c49 +size 1243807 diff --git a/videos/Agilex_Cobot_Magic_fold_short_sleeve_black.mp4 b/videos/Agilex_Cobot_Magic_fold_short_sleeve_black.mp4 new file mode 100644 index 
0000000000000000000000000000000000000000..642ae7fc707a505a5f9cadc7e11e5d0db16418a2 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_fold_short_sleeve_black.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eea67d96193ba6a2a39eb88e08951806b6974527c7add2d76cedfc6a3e5d192b +size 2937657 diff --git a/videos/Agilex_Cobot_Magic_move_object_green_tablecloth.mp4 b/videos/Agilex_Cobot_Magic_move_object_green_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..cdc6e0efe07e152208a86901766b95577f7934f3 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_move_object_green_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da23acee054ef4fdf6c041663be6fc33c9175ee8d2454c75abf24a3fabec69de +size 1382602 diff --git a/videos/Agilex_Cobot_Magic_move_object_red_tablecloth.mp4 b/videos/Agilex_Cobot_Magic_move_object_red_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..8d36a51190d37a36632912cba5cbc7a38e64b011 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_move_object_red_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b16208be2dec60a346ecd62a11a6c53fac04d1fccc1d691f5bd25813f127bde8 +size 760579 diff --git a/videos/Agilex_Cobot_Magic_storage_object_red_tablecloth.mp4 b/videos/Agilex_Cobot_Magic_storage_object_red_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..a52aaa44731551cd52f19cf3b4a6cd48ea7a2251 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_storage_object_red_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35ca24237dad46a7d764f7397d4bcb837fba3d42657bce736f84420e52f2518e +size 857936 diff --git a/videos/Agilex_Cobot_Magic_storage_peach_brown_bag.mp4 b/videos/Agilex_Cobot_Magic_storage_peach_brown_bag.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..ed1bbf848b32a61c0ac0104622f9b30c7439bb95 --- /dev/null +++ 
b/videos/Agilex_Cobot_Magic_storage_peach_brown_bag.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa65d8bbbeefab65b26ee54102da479dd373095f8db815794b8d60e928290142 +size 740732 diff --git a/videos/Airbot_MMK2_close_door_left.mp4 b/videos/Airbot_MMK2_close_door_left.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..eafc7788f5f6694fba1d6ae633e3676f762261ca --- /dev/null +++ b/videos/Airbot_MMK2_close_door_left.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ac202a1322ac9b41cd9e9f4ab69655abe26fe6b5a3fe7d2b26c5f5b5b7a04d0 +size 195322 diff --git a/videos/Airbot_MMK2_cut_scallion.mp4 b/videos/Airbot_MMK2_cut_scallion.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..462317b98614bb940992358e16c4fcca0ffed7ff --- /dev/null +++ b/videos/Airbot_MMK2_cut_scallion.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:750586b390bd15043fc80d22d0615c61b7ed1d47aa29f67dd0df20dac585447e +size 959950 diff --git a/videos/Airbot_MMK2_doodled_line.mp4 b/videos/Airbot_MMK2_doodled_line.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..32a8f5a7edacefb0a810663d124186c6982d4b94 --- /dev/null +++ b/videos/Airbot_MMK2_doodled_line.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ff14457706811f9c635a503c9658b41c89422014dd275de25cb88c809815f6c +size 1822251 diff --git a/videos/Airbot_MMK2_move_block_twice.mp4 b/videos/Airbot_MMK2_move_block_twice.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..8880fe5173e3a3bea742e73cda54287cc0135768 --- /dev/null +++ b/videos/Airbot_MMK2_move_block_twice.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13082e0e9efa4184ea327679871b086debba45f0f9e38f18e997f979e152b223 +size 622338 diff --git a/videos/Airbot_MMK2_move_paper_box.mp4 b/videos/Airbot_MMK2_move_paper_box.mp4 new file mode 100644 index 
0000000000000000000000000000000000000000..29e356d7a54e18ba15c19c588e3b1fe0359119d3 --- /dev/null +++ b/videos/Airbot_MMK2_move_paper_box.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c30dfc3b124d144606066b103ecab9f2669e53183e8b6bf2470217806eb2379d +size 293907 diff --git a/videos/Airbot_MMK2_organize_plate.mp4 b/videos/Airbot_MMK2_organize_plate.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..fd10a77f42ba157207cf972171a7473300378284 --- /dev/null +++ b/videos/Airbot_MMK2_organize_plate.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7cde9c9141daaac4ab5a1ca543912b97edb05ba203a8d0c7fa76be7d76fc4fa5 +size 2111184 diff --git a/videos/Airbot_MMK2_stack_cup.mp4 b/videos/Airbot_MMK2_stack_cup.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..8603f002cb219d4787e741956c62abe257c996a2 --- /dev/null +++ b/videos/Airbot_MMK2_stack_cup.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f7688f98ee750b43c5d6946531514421d1cbf7f539f2aa35625001ce9917ad80 +size 1761163 diff --git a/videos/Airbot_MMK2_storage_book.mp4 b/videos/Airbot_MMK2_storage_book.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..b4f6e073c051670d960dd14e241d5087ff0a62de --- /dev/null +++ b/videos/Airbot_MMK2_storage_book.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45ff0744efac6bc25e75878e85bbc16db130cbbc7e806aabeb1a6774458a6b72 +size 1493414 diff --git a/videos/Airbot_MMK2_storage_egg_white_box.mp4 b/videos/Airbot_MMK2_storage_egg_white_box.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..c08788a8840218589bc8ab2b13139a25e8ee165b --- /dev/null +++ b/videos/Airbot_MMK2_storage_egg_white_box.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e24db8370eff35aa6a38d6424dc2f3330c0dd3a6572a8094b483296a7ee3188 +size 324299 diff --git a/videos/Airbot_MMK2_storage_milk_tissue.mp4 
b/videos/Airbot_MMK2_storage_milk_tissue.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..b34847103f8b620cca06962aba033892539de19e --- /dev/null +++ b/videos/Airbot_MMK2_storage_milk_tissue.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:513e7368348532e329ef45a51911a0eadd231ff5b66bb3f41b6781960499b291 +size 700597 diff --git a/videos/Airbot_MMK2_storage_onion_sweet_potato.mp4 b/videos/Airbot_MMK2_storage_onion_sweet_potato.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..352a13bd8da24e7ac1ad623d15d049f8de497e5f --- /dev/null +++ b/videos/Airbot_MMK2_storage_onion_sweet_potato.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f663953a4f05070ba2ad3a2c393cfc2e58f17c93f2b613355aa32f378757cd4 +size 384859 diff --git a/videos/Airbot_MMK2_unscrew_bottle_cap.mp4 b/videos/Airbot_MMK2_unscrew_bottle_cap.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..1706cdc6dafa1990525e6f507fb4e4b51a636016 --- /dev/null +++ b/videos/Airbot_MMK2_unscrew_bottle_cap.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ffea90d38068a91eb67d6b1a7ed41603f090ddeeb0ce04f54e96f80cf401ae4 +size 464824 diff --git a/videos/Galaxea_R1_Lite_pour_powder_marble_bar_counter.mp4 b/videos/Galaxea_R1_Lite_pour_powder_marble_bar_counter.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..b3ac24b90da4ca832dce9b1b5e5e945c45ab4014 --- /dev/null +++ b/videos/Galaxea_R1_Lite_pour_powder_marble_bar_counter.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfc16f7a1b7621c9a10adb6e0d023a9c8bcb6b691679cfd649ce50d0405bb55f +size 2154133 diff --git a/videos/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.mp4 b/videos/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..fa364ab685a90150e0df2c254ff1ef23cebf02d1 --- /dev/null +++ 
b/videos/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a52ca62e2046a33c9312f81efdf65e96c225ba12fbf4f569e793dbfafa9ad334 +size 899200